1
0
Fork 0

Summarization

This commit is contained in:
Pabloader 2026-03-25 13:22:17 +00:00
parent 5a6897f1f6
commit a605c95890
7 changed files with 161 additions and 172 deletions

View File

@ -133,10 +133,35 @@
}
/* Chat-footer readout showing prompt token usage ("taken / total tokens")
   alongside the summarize button. */
.tokenCounter {
display: flex;
align-items: center;
gap: 8px;
font-size: 11px;
color: var(--text-muted);
}
/* Small icon button (Sparkles) that triggers chapter summarization. */
.summarizeButton {
display: flex;
align-items: center;
justify-content: center;
padding: 4px;
color: var(--text-muted);
background: transparent;
border: 1px solid var(--border);
border-radius: 4px;
cursor: pointer;
/* Highlight text and border on hover, but only while enabled. */
&:hover:not(:disabled) {
color: var(--text);
border-color: var(--text-muted);
}
/* Dimmed while a summarization run is in progress or prerequisites
   (story / connection / model) are missing. */
&:disabled {
opacity: 0.4;
cursor: default;
}
}
.toggleContainer {
display: flex;
align-items: center;

View File

@ -17,15 +17,6 @@
text-align: center;
}
.summarizing {
display: block;
font-family: sans-serif;
font-size: 12px;
font-weight: normal;
font-style: italic;
color: var(--text-muted);
margin-top: 4px;
}
.content {
flex: 1;

View File

@ -2,11 +2,13 @@ import { useInputState } from "@common/hooks/useInputState";
import { highlight } from "@common/highlight";
import { Sidebar } from "./sidebar";
import { useAppState, type ChatMessage } from "../contexts/state";
import { useChapterSummarization } from "../utils/useChapterSummarization";
import styles from '../assets/chat-sidebar.module.css';
import { useState, useRef, useEffect, useMemo, useCallback } from "preact/hooks";
import LLM from "../utils/llm";
import Prompt from "../utils/prompt";
import { Tools } from "../utils/tools";
import { Sparkles } from "lucide-preact";
import clsx from "clsx";
// ─── Role Header ──────────────────────────────────────────────────────────────
@ -41,6 +43,7 @@ const RoleHeader = ({ message, chatMessages }: RoleHeaderProps) => {
export const ChatSidebar = () => {
const appState = useAppState();
const { currentStory, dispatch, connection, model, enableThinking } = appState;
const { summarizeAll, isSummarizing } = useChapterSummarization();
const [input, setInput] = useInputState('');
const [isLoading, setIsLoading] = useState(false);
const [isCollapsed, setCollapsed] = useState(false);
@ -336,11 +339,16 @@ export const ChatSidebar = () => {
/>
<span>Enable thinking</span>
</label>
{tokenCount && (
<div class={styles.tokenCounter}>
{tokenCount.taken} / {tokenCount.total} tokens
</div>
)}
<div class={styles.tokenCounter}>
{tokenCount && <span>{tokenCount.taken} / {tokenCount.total} tokens</span>}
<button
class={styles.summarizeButton}
onClick={summarizeAll}
disabled={isSummarizing || !currentStory || !connection || !model}
title={isSummarizing ? 'Summarizing...' : 'Summarize'}>
<Sparkles size={14} />
</button>
</div>
</div>
<textarea
class={styles.input}

View File

@ -1,11 +1,9 @@
import { ContentEditable } from "@common/components/ContentEditable";
import { highlight } from "@common/highlight";
import { useAppState, type Tab } from "../contexts/state";
import { useChapterSummarization } from "../utils/useChapterSummarization";
import styles from '../assets/editor.module.css';
import { useMemo } from "preact/hooks";
import clsx from "clsx";
import { Pause, Play } from "lucide-preact";
import { CharacterEditor } from "./character-editor";
import { LocationEditor } from "./location-editor";
import { ChaptersEditor } from "./chapters-editor";
@ -19,8 +17,7 @@ const TABS: { id: Tab; label: string }[] = [
];
export const Editor = () => {
const { currentStory, summarizationPaused, dispatch } = useAppState();
const { pendingCount } = useChapterSummarization();
const { currentStory, dispatch } = useAppState();
if (!currentStory) {
return <div class={styles.editor} />;
@ -59,9 +56,6 @@ export const Editor = () => {
<div class={styles.editor}>
<div class={styles.title}>
{currentStory.title}
{pendingCount > 0 && (
<span class={styles.summarizing}>Summarizing ({pendingCount})</span>
)}
</div>
<div class={styles.content}>
{currentStory.currentTab === "story" && (
@ -100,15 +94,6 @@ export const Editor = () => {
{tab.label}
</button>
))}
<button
class={clsx(styles.tab, styles.tabRight, summarizationPaused && styles.active)}
onClick={() => dispatch({ type: 'SET_SUMMARIZATION_PAUSED', paused: !summarizationPaused })}
>
{summarizationPaused
? <><Play size={14} /> Summarization paused</>
: <><Pause size={14} /> Pause summarization</>
}
</button>
</div>
</div>
);

View File

@ -68,7 +68,6 @@ interface IState {
enableThinking: boolean;
bannedTokens: string[];
systemInstruction: string;
summarizationPaused: boolean;
}
// ─── Actions ─────────────────────────────────────────────────────────────────
@ -97,7 +96,6 @@ type Action =
| { type: 'ADD_LOCATION'; storyId: string; location: Location }
| { type: 'EDIT_LOCATION'; storyId: string; locationId: string; updates: Partial<Location> }
| { type: 'DELETE_LOCATION'; storyId: string; locationId: string }
| { type: 'SET_SUMMARIZATION_PAUSED'; paused: boolean }
| { type: 'STORE_CHAPTER_SUMMARY'; storyId: string; header: string; hash: Chapters.Hash; summary: string }
| { type: 'CLEAN_CHAPTER_SUMMARIES'; storyId: string; validHashes: Record<string, Chapters.Hash[]> };
@ -111,7 +109,6 @@ const DEFAULT_STATE: IState = {
enableThinking: false,
bannedTokens: [],
systemInstruction: `You are a creative writing assistant. Help the user develop their story by writing engaging content, maintaining consistency with the established characters, settings, and plot. Follow the user's instructions while staying true to the story's tone and style.`,
summarizationPaused: false,
};
// ─── Reducer ─────────────────────────────────────────────────────────────────
@ -388,9 +385,6 @@ function reducer(state: IState, action: Action): IState {
}),
};
}
case 'SET_SUMMARIZATION_PAUSED': {
return { ...state, summarizationPaused: action.paused };
}
case 'STORE_CHAPTER_SUMMARY': {
return {
...state,
@ -423,7 +417,6 @@ export interface AppState {
enableThinking: boolean;
bannedTokens: string[];
systemInstruction: string;
summarizationPaused: boolean;
dispatch: (action: Action) => void;
}
@ -444,7 +437,6 @@ export const StateContextProvider = ({ children }: { children?: any }) => {
enableThinking: state.enableThinking,
bannedTokens: state.bannedTokens ?? [],
systemInstruction: state.systemInstruction ?? '',
summarizationPaused: state.summarizationPaused ?? false,
dispatch,
}), [state]);

View File

@ -1,4 +1,5 @@
import LLM from "./llm";
import Chapters from "./chapters";
import { type AppState, LocationScale } from "../contexts/state";
import { Tools } from "./tools";
@ -7,56 +8,106 @@ namespace Prompt {
return text.length / 3;
}
export function formatStoryText(text: string, tokenBudget: number): string {
// Number of most-recent chunks that are always kept verbatim (never
// summarized or omitted) when trimming the story to the token budget.
const KEEP_RECENT_CHUNKS = 2;
// One renderable chunk of story text and how it should appear in the prompt.
interface ChunkSlot {
header: string; // chapter header the chunk belongs to; falsy headers are skipped when rendering/counting
body: string; // the chunk's full text
summary: string | null; // cached summary for this chunk, or null if none exists yet
mode: 'full' | 'summary' | 'omitted'; // render as full text, as its summary, or as an elision marker
}
function buildSlots(text: string, chapters: Chapters.Chapter[]): ChunkSlot[] {
const parsed = Chapters.parseText(text);
const slots: ChunkSlot[] = [];
for (const parsedChapter of parsed) {
const cachedChapter = chapters.find(c => c.header === parsedChapter.header)
?? Chapters.emptyChapter(parsedChapter.header);
const chunks = Chapters.splitIntoChunks(parsedChapter.body);
for (const body of chunks) {
const { summary } = Chapters.lookupSummary(cachedChapter, body);
slots.push({ header: parsedChapter.header, body, summary, mode: 'full' });
}
}
return slots;
}
/**
 * Estimates the token cost of rendering the given slots.
 * Omitted slots cost nothing, and each distinct (truthy) header is charged
 * exactly once — the first time a non-omitted slot carries it.
 */
function countSlotTokens(slots: ChunkSlot[]): number {
let tokens = 0;
const chargedHeaders = new Set<string>();
for (const slot of slots) {
if (slot.mode === 'omitted') continue;
if (slot.header && !chargedHeaders.has(slot.header)) {
chargedHeaders.add(slot.header);
tokens += approxTokens(slot.header);
}
const text = slot.mode === 'summary' ? slot.summary ?? '' : slot.body;
tokens += approxTokens(text);
}
return tokens;
}
/**
 * Renders slots into the final story text for the prompt.
 * Each chapter header is emitted once, before its first rendered chunk.
 * Chunks render as their full body, as "[Summary: ...]", or as a "[...]"
 * elision marker. Consecutive omitted chunks collapse into a single "[...]"
 * (the caller omits chunks from the middle outward, so runs are common and
 * repeating the marker adds nothing but noise).
 */
function renderSlots(slots: ChunkSlot[]): string {
const parts: string[] = [];
const shownHeaders = new Set<string>();
let prevOmitted = false; // was the previously rendered slot an elision marker?
for (const slot of slots) {
const lines: string[] = [];
if (slot.header && !shownHeaders.has(slot.header)) {
lines.push(slot.header);
shownHeaders.add(slot.header);
}
const omitted = slot.mode === 'omitted';
// Skip repeated markers — unless this slot introduces a new header,
// which we still want to show above its own "[...]".
if (omitted && prevOmitted && lines.length === 0) continue;
const content = omitted ? '[...]'
: slot.mode === 'summary' ? `[Summary: ${slot.summary}]`
: slot.body;
lines.push(content);
parts.push(lines.join('\n\n'));
prevOmitted = omitted;
}
return parts.join('\n\n');
}
export function formatStoryChunks(
text: string,
chapters: Chapters.Chapter[],
tokenBudget: number,
): string {
if (!text) return '';
if (approxTokens(text) <= tokenBudget / 2) {
return text;
const slots = buildSlots(text, chapters);
if (slots.length === 0) return '';
if (countSlotTokens(slots) <= tokenBudget) {
return renderSlots(slots);
}
const lines = text.split('\n');
const separator = '[...]';
// Max chars for content = half-budget tokens * 3 chars/token, minus separator overhead
const targetChars = Math.floor(tokenBudget / 2 * 3) - separator.length - 2;
const recentStart = Math.max(0, slots.length - KEEP_RECENT_CHUNKS);
if (targetChars <= 0) {
return separator;
// Phase 1: summarize non-recent chunks, stop as soon as we fit
for (let i = 0; i < recentStart; i++) {
if (slots[i].summary) {
slots[i].mode = 'summary';
if (countSlotTokens(slots) <= tokenBudget) return renderSlots(slots);
}
}
// 1/3 of budget for start, 2/3 for end
const startCharsMax = Math.floor(targetChars / 3);
const endCharsMax = targetChars - startCharsMax;
// Phase 2: delete from middle outward, never delete last slot
const middle = recentStart / 2;
const deletable = Array.from({ length: recentStart }, (_, i) => i)
.sort((a, b) => Math.abs(a - middle) - Math.abs(b - middle));
let startCharsUsed = 0;
let startEnd = 0;
for (let i = 0; i < lines.length; i++) {
const lineLen = lines[i].length + 1; // +1 for '\n'
if (startCharsUsed + lineLen > startCharsMax) break;
startCharsUsed += lineLen;
startEnd = i + 1;
for (const i of deletable) {
slots[i].mode = 'omitted';
if (countSlotTokens(slots) <= tokenBudget) break;
}
let endCharsUsed = 0;
let endStart = lines.length;
for (let i = lines.length - 1; i >= startEnd; i--) {
const lineLen = lines[i].length + 1;
if (endCharsUsed + lineLen > endCharsMax) break;
endCharsUsed += lineLen;
endStart = i;
}
if (startEnd >= endStart) {
return text; // All lines fit after all
}
const startPart = lines.slice(0, startEnd).join('\n');
const endPart = lines.slice(endStart).join('\n');
const parts: string[] = [];
if (startPart) parts.push(startPart);
parts.push(separator);
if (endPart) parts.push(endPart);
return parts.join('\n');
return renderSlots(slots);
}
export function formatCharactersMarkdown(state: AppState): string {
@ -138,7 +189,7 @@ namespace Prompt {
}
if (currentStory.text && storyTokenBudget > 0) {
const storyText = formatStoryText(currentStory.text, storyTokenBudget);
const storyText = formatStoryChunks(currentStory.text, currentStory.chapters ?? [], storyTokenBudget);
if (storyText) {
parts.push(`## Story\n${storyText}`);
}

View File

@ -1,87 +1,22 @@
import { useEffect, useRef, useState } from 'preact/hooks';
import { useRef, useState } from 'preact/hooks';
import { useAppState, type AppState } from '../contexts/state';
import Chapters from './chapters';
import LLM from './llm';
interface SummarizationJob {
storyId: string;
header: string;
index: number;
body: string;
hash: Chapters.Hash;
}
const DEBOUNCE_MS = 2000;
export function useChapterSummarization() {
const state = useAppState();
// Always-fresh ref so async processQueue reads current connection/model/dispatch
const stateRef = useRef<AppState>(state);
stateRef.current = state;
const queueRef = useRef<SummarizationJob[]>([]);
const processingRef = useRef(false);
const debounceRef = useRef<ReturnType<typeof setTimeout> | null>(null);
const [pendingCount, setPendingCount] = useState(0);
const [isSummarizing, setIsSummarizing] = useState(false);
const processQueue = async () => {
if (processingRef.current) return;
processingRef.current = true;
const summarizeAll = async () => {
const { currentStory, connection, model, dispatch } = stateRef.current;
if (!currentStory || !connection || !model || isSummarizing) return;
setIsSummarizing(true);
try {
while (queueRef.current.length > 0) {
const { connection, model, dispatch, summarizationPaused } = stateRef.current;
if (!connection || !model || summarizationPaused) break;
const job = queueRef.current[0];
setPendingCount(queueRef.current.length);
queueRef.current = queueRef.current.slice(1);
try {
const summary = await LLM.summarize(connection, model.id, job.body);
dispatch({
type: 'STORE_CHAPTER_SUMMARY',
storyId: job.storyId,
header: job.header,
hash: job.hash,
summary,
});
} catch {
// skip failed job, continue with rest
}
}
} finally {
processingRef.current = false;
setPendingCount(0);
}
};
const enqueue = (jobs: SummarizationJob[]) => {
for (const job of jobs) {
const idx = queueRef.current.findIndex(
j => j.header === job.header && j.index === job.index
);
if (idx !== -1) {
queueRef.current[idx] = job;
} else {
queueRef.current.push(job);
}
}
setPendingCount(queueRef.current.length);
processQueue();
};
// Re-scan when text changes (debounced)
useEffect(() => {
if (debounceRef.current) clearTimeout(debounceRef.current);
debounceRef.current = setTimeout(() => {
const { currentStory } = stateRef.current;
if (!currentStory?.text) return;
const parsed = Chapters.parseText(currentStory.text);
const jobs: SummarizationJob[] = [];
const validHashes: Record<string, Chapters.Hash[]> = {};
for (const parsedChapter of parsed) {
@ -92,32 +27,34 @@ export function useChapterSummarization() {
validHashes[parsedChapter.header] = [];
for (let i = 0; i < chunks.length; i++) {
const { hash, summary } = Chapters.lookupSummary(cachedChapter, chunks[i]);
validHashes[parsedChapter.header].push(hash);
for (const body of chunks) {
const { hash, summary } = Chapters.lookupSummary(cachedChapter, body);
if (summary === null) {
jobs.push({ storyId: currentStory.id, header: parsedChapter.header, index: i, body: chunks[i], hash });
const newSummary = await LLM.summarize(connection, model.id, body);
dispatch({
type: 'STORE_CHAPTER_SUMMARY',
storyId: currentStory.id,
header: parsedChapter.header,
hash,
summary: newSummary,
});
}
validHashes[parsedChapter.header].push(hash);
}
}
stateRef.current.dispatch({ type: 'CLEAN_CHAPTER_SUMMARIES', storyId: currentStory.id, validHashes });
if (jobs.length > 0) {
enqueue(jobs);
}
}, DEBOUNCE_MS);
return () => {
if (debounceRef.current) clearTimeout(debounceRef.current);
};
}, [state.currentStory?.text]);
// Resume processing if connection/model become available or summarization is unpaused
useEffect(() => {
if (queueRef.current.length > 0) {
processQueue();
// Clean up stale cache entries
dispatch({
type: 'CLEAN_CHAPTER_SUMMARIES',
storyId: currentStory.id,
validHashes,
});
} finally {
setIsSummarizing(false);
}
}, [state.connection, state.model, state.summarizationPaused]);
};
return { pendingCount };
return { summarizeAll, isSummarizing };
}