1
0
Fork 0

Compare commits

..

2 Commits

Author SHA1 Message Date
Pabloader d1325b33b7 System prompt 2026-03-23 21:58:21 +00:00
Pabloader c2c55eb820 Format system prompt 2026-03-23 21:39:21 +00:00
12 changed files with 554 additions and 269 deletions

View File

@ -188,6 +188,21 @@
}
}
/* Button shown in place of Send while a chat response is streaming;
   clicking it aborts the in-flight generation. */
.stopButton {
  padding: 8px 16px;
  font-size: 13px;
  font-weight: bold;
  color: var(--bg);
  /* Falls back to Material red when the theme defines no --error variable. */
  background: var(--error, #f44336);
  border: none;
  border-radius: 4px;
  cursor: pointer;

  &:hover {
    /* Darker red on hover for affordance. */
    background: #d32f2f;
  }
}
.clearButton {
padding: 8px 16px;
font-size: 12px;

View File

@ -56,6 +56,37 @@
}
}
/* Horizontal tab strip rendered below the modal header. */
.tabs {
  display: flex;
  border-bottom: 1px solid var(--border);
  padding: 0 20px;
  gap: 8px;
}

/* A single tab button; the selected one carries the .active modifier. */
.tab {
  display: flex;
  align-items: center;
  gap: 6px;
  padding: 12px 16px;
  background: transparent;
  border: none;
  /* Transparent underline reserves the 2px so activating a tab
     does not shift the layout. */
  border-bottom: 2px solid transparent;
  color: var(--text-muted);
  cursor: pointer;
  font-size: 14px;
  transition: all 0.2s;

  &:hover {
    color: var(--text);
    background: var(--bg-hover);
  }

  &.active {
    color: var(--accent);
    border-bottom-color: var(--accent);
  }
}
.content {
flex: 1;
overflow-y: auto;
@ -80,7 +111,8 @@
}
.input,
.select {
.select,
.textarea {
width: 100%;
padding: 8px;
border-radius: 4px;
@ -89,6 +121,13 @@
color: var(--text);
}
/* Multi-line settings field: inherits the modal's typography
   (shares base styles with .input/.select) and only resizes vertically. */
.textarea {
  resize: vertical;
  font-family: inherit;
  font-size: inherit;
  line-height: 1.5;
}
.footer {
padding: 16px 20px;
border-top: 1px solid var(--border);

View File

@ -1,98 +0,0 @@
import clsx from "clsx";
import { useInputState } from "@common/hooks/useInputState";
import { useAppState } from "../contexts/state";
import styles from "../assets/settings-modal.module.css";
import { X } from "lucide-preact";
interface Props {
onClose: () => void;
}
/**
 * Modal dialog for editing the list of banned tokens.
 *
 * New tokens are added through the text input (Enter key or the Add button);
 * existing tokens are listed alphabetically and can be removed one by one.
 * Escape, the overlay, the close icon and the Done button all dismiss it.
 */
export const BannedTokensModal = ({ onClose }: Props) => {
  const { bannedTokens, dispatch } = useAppState();
  const [inputValue, setInputValue] = useInputState();

  // Commit the pending input as a new banned token; blanks and duplicates are ignored.
  const handleAdd = () => {
    const token = inputValue.trim();
    if (!token || bannedTokens.includes(token)) {
      return;
    }
    dispatch({
      type: "SET_BANNED_TOKENS",
      tokens: [...bannedTokens, token],
    });
    setInputValue("");
  };

  // Drop a single token from the banned list.
  const handleRemove = (token: string) => {
    const tokens = bannedTokens.filter((t) => t !== token);
    dispatch({ type: "SET_BANNED_TOKENS", tokens });
  };

  // Enter adds the pending token, Escape closes the modal.
  const handleKeyDown = (e: KeyboardEvent) => {
    switch (e.key) {
      case "Enter":
        handleAdd();
        break;
      case "Escape":
        onClose();
        break;
    }
  };

  // Display order: alphabetical, ignoring case and surrounding whitespace.
  const sortedTokens = bannedTokens
    .slice()
    .sort((a, b) => a.trim().toLowerCase().localeCompare(b.trim().toLowerCase()));

  return (
    <div class={styles.overlay} onClick={onClose}>
      <div class={styles.modal} onClick={(e) => e.stopPropagation()}>
        <div class={styles.header}>
          <h2 class={styles.title}>Banned Tokens</h2>
          <button class={styles.closeButton} onClick={onClose}>
            <X size={20} />
          </button>
        </div>
        <div class={styles.content}>
          <div class={styles.inputRow}>
            <input
              class={styles.input}
              type="text"
              value={inputValue}
              placeholder="Token to ban"
              onInput={setInputValue}
              onKeyDown={handleKeyDown}
              autoFocus
            />
            <button class={clsx(styles.button, styles.buttonPrimary)} onClick={handleAdd}>
              Add
            </button>
          </div>
          <div class={styles.divider} />
          <div class={styles.tokenList}>
            {sortedTokens.length > 0 ? (
              sortedTokens.map((token) => (
                <div key={token} class={styles.tokenItem}>
                  <span>{token}</span>
                  <button
                    class={styles.tokenRemoveButton}
                    onClick={() => handleRemove(token)}
                  >
                    <X size={12} />
                  </button>
                </div>
              ))
            ) : (
              <p class={styles.emptyText}>No banned tokens</p>
            )}
          </div>
        </div>
        <div class={styles.footer}>
          <button class={clsx(styles.button, styles.buttonSecondary)} onClick={onClose}>
            Done
          </button>
        </div>
      </div>
    </div>
  );
};

View File

@ -1,12 +1,14 @@
import { useAppState, type Character } from "../contexts/state";
import { useState } from "preact/hooks";
import styles from '../assets/character-editor.module.css';
import LLM from "../utils/llm";
export const CharacterEditor = () => {
const { currentStory, dispatch } = useAppState();
const { currentStory, dispatch, connection, model } = useAppState();
const [newNickname, setNewNickname] = useState<Record<string, string>>({});
const [newRelation, setNewRelation] = useState<Record<string, { name: string; relation: string }>>({});
const [showDeleteConfirm, setShowDeleteConfirm] = useState<string | null>(null);
const [generatingShortDesc, setGeneratingShortDesc] = useState<string | null>(null);
if (!currentStory) {
return null;
@ -99,6 +101,23 @@ export const CharacterEditor = () => {
setNewRelation({ ...newRelation, [characterId]: { ...current, [field]: value } });
};
const handleGenerateShortDescription = async (characterId: string) => {
if (!connection || !model) return;
const character = currentStory.characters.find(c => c.id === characterId);
if (!character || !character.description.trim()) return;
setGeneratingShortDesc(characterId);
try {
const shortDesc = await LLM.summarize(connection, model.id, character.description, 'sentence');
handleEditCharacter(characterId, 'shortDescription', shortDesc.trim());
} catch (error) {
console.error('Failed to generate short description:', error);
} finally {
setGeneratingShortDesc(null);
}
};
return (
<div class={styles.characterEditor}>
<div class={styles.header}>
@ -167,8 +186,12 @@ export const CharacterEditor = () => {
<div class={styles.field}>
<div class={styles.label}>
Short Description
<button class={styles.generateButton}>
Generate
<button
class={styles.generateButton}
onClick={() => handleGenerateShortDescription(character.id)}
disabled={!character.description.trim() || generatingShortDesc === character.id || !connection || !model}
>
{generatingShortDesc === character.id ? 'Generating...' : 'Generate'}
</button>
</div>
<textarea

View File

@ -151,6 +151,9 @@ export const ChatSidebar = () => {
let tool_calls: LLM.ToolCall[] | undefined;
for await (const chunk of LLM.generateStream(connection, request)) {
if (abortControllerRef.current?.signal.aborted) {
break;
}
const delta = chunk.choices[0]?.delta;
if (delta?.tool_calls) {
@ -178,9 +181,6 @@ export const ChatSidebar = () => {
},
});
}
if (abortControllerRef.current?.signal.aborted) {
break;
}
}
const assistantMessage: ChatMessage = {
id: assistantMessageId,
@ -236,15 +236,20 @@ export const ChatSidebar = () => {
setInput('');
setIsLoading(true);
setError(null);
abortControllerRef.current = new AbortController();
try {
await sendMessage([userMessage]);
} finally {
setIsLoading(false);
abortControllerRef.current = new AbortController();
}
}, [currentStory, input, connection, model, isLoading, sendMessage]);
const handleStopGeneration = useCallback(() => {
abortControllerRef.current?.abort();
setIsLoading(false);
}, []);
const handleKeyDown = (e: KeyboardEvent) => {
if (e.key === 'Enter' && !e.shiftKey) {
e.preventDefault();
@ -346,13 +351,22 @@ export const ChatSidebar = () => {
rows={3}
disabled={isDisabled}
/>
{isLoading ? (
<button
class={styles.stopButton}
onClick={handleStopGeneration}
>
Stop
</button>
) : (
<button
class={styles.sendButton}
onClick={handleSendMessage}
disabled={isDisabled || !input.trim()}
>
{isLoading ? 'Sending...' : 'Send'}
Send
</button>
)}
</div>
)}
</div>

View File

@ -0,0 +1,185 @@
import { useMemo, useRef } from "preact/hooks";
import { useQuery } from "@common/hooks/useAsyncState";
import { useInputState } from "@common/hooks/useInputState";
import { useUpdate } from "@common/hooks/useUpdate";
import { useAppState } from "../contexts/state";
import LLM from "../utils/llm";
import styles from "../assets/settings-modal.module.css";
import { X } from "lucide-preact";
interface Props {
onClose: () => void;
}
/**
 * Modal for configuring the LLM API connection (URL + key) and picking a model.
 *
 * Models are fetched whenever a complete connection exists and the user blurs a
 * field or presses Enter. Fetched models are sorted by capability (tool
 * support outweighs thinking support), then by context size, then by id, and
 * grouped by context size for the <optgroup> list. Confirm persists both the
 * connection and the selected model, then closes the modal.
 */
export const ConnectionSettingsModal = ({ onClose }: Props) => {
  const { connection, model, dispatch } = useAppState();
  const [url, setUrl] = useInputState(connection?.url ?? "");
  const [apiKey, setApiKey] = useInputState(connection?.apiKey ?? "");
  const [selectedModel, setSelectedModel] = useInputState(model?.id ?? "");
  const [update, triggerFetch] = useUpdate();

  // Refs mirror the inputs so connectionToFetch only changes when the user
  // explicitly triggers a fetch (via `update`), not on every keystroke.
  const urlRef = useRef(url);
  const apiKeyRef = useRef(apiKey);
  urlRef.current = url;
  apiKeyRef.current = apiKey;

  // Connection snapshot taken at the last triggerFetch(); null until both
  // fields were filled in at that moment.
  const connectionToFetch = useMemo<LLM.Connection | null>(() => {
    const currentUrl = urlRef.current;
    const currentApiKey = apiKeyRef.current;
    if (!currentUrl || !currentApiKey) return null;
    return { url: currentUrl, apiKey: currentApiKey };
  }, [update]);

  const fetchModels = useMemo(() => async (conn: LLM.Connection | null) => {
    if (!conn) return [];
    const r = await LLM.getModels(conn);
    return r.data;
  }, []);

  const modelsData = useQuery(fetchModels, connectionToFetch);
  const isLoadingModels = connectionToFetch != null && modelsData == undefined;

  const groupedModels = useMemo(() => {
    // Copy before sorting: Array#sort mutates in place, and modelsData is
    // owned by useQuery — sorting it directly would mutate shared state.
    const sorted = [...(modelsData ?? [])].sort((a, b) => {
      // Capability first: tool support weighs more than thinking support.
      const aWeight = Number(a.support_tools) * 2 + Number(a.support_thinking);
      const bWeight = Number(b.support_tools) * 2 + Number(b.support_thinking);
      if (aWeight !== bWeight) {
        return bWeight - aWeight;
      }
      // Then larger context windows first.
      const aContext = a.max_context ?? 0;
      const bContext = b.max_context ?? 0;
      if (aContext !== bContext) {
        return bContext - aContext;
      }
      // Finally a deterministic alphabetical tiebreak.
      return a.id.localeCompare(b.id);
    });
    // Group by context size
    const groups = Map.groupBy(sorted, m => m.max_context ?? 0);
    // Convert to array sorted by context size (bigger first)
    return Array.from(groups.entries())
      .sort((a, b) => b[0] - a[0])
      .map(([context, models]) => ({ context, models }));
  }, [modelsData]);

  const handleBlur = () => {
    if (url && apiKey) {
      triggerFetch();
    }
  };

  const handleKeyDown = (e: KeyboardEvent) => {
    if (e.key === 'Enter' && url && apiKey) {
      triggerFetch();
    }
  };

  // Connection built from the *current* field values; unlike connectionToFetch
  // it never lags behind what the user has typed.
  const connectionToTest = url && apiKey ? { url, apiKey } : null;

  const handleConfirm = () => {
    // Persist what is currently typed, not the last-fetched snapshot:
    // connectionToFetch only updates on triggerFetch(), so it can be stale if
    // the user edited a field and clicked Confirm straight away.
    dispatch({
      type: 'SET_CONNECTION',
      connection: connectionToTest,
    });
    const selectedModelInfo = modelsData?.find(m => m.id === selectedModel) ?? null;
    dispatch({
      type: 'SET_MODEL',
      model: selectedModelInfo,
    });
    onClose();
  };

  return (
    <div class={styles.overlay} onClick={onClose}>
      <div class={styles.modal} onClick={(e) => e.stopPropagation()}>
        <div class={styles.header}>
          <h2 class={styles.title}>Connection Settings</h2>
          <button class={styles.closeButton} onClick={onClose}>
            <X size={20} />
          </button>
        </div>
        <div class={styles.content}>
          {/* autocomplete/randomized names on the inputs discourage browser
              autofill; the attribute is invalid on a div so it lives on the
              inputs only. */}
          <div class={styles.form}>
            <div class={styles.formGroup}>
              <label class={styles.label}>
                API URL
              </label>
              <input
                type="text"
                value={url}
                onInput={setUrl}
                onBlur={handleBlur}
                onKeyDown={handleKeyDown}
                placeholder="http://localhost:1234"
                class={styles.input}
                autocomplete="off"
                name="api-url-random"
              />
            </div>
            <div class={styles.formGroup}>
              <label class={styles.label}>
                API Key
              </label>
              <input
                type="password"
                value={apiKey}
                onInput={setApiKey}
                onBlur={handleBlur}
                onKeyDown={handleKeyDown}
                placeholder="your-api-key"
                class={styles.input}
                autocomplete="new-password"
                name="api-key-random"
              />
            </div>
            <div class={styles.formGroup}>
              <label class={styles.label}>
                Model
              </label>
              {connectionToTest ? (
                isLoadingModels ? (
                  <p>Loading models...</p>
                ) : groupedModels.length > 0 ? (
                  <select
                    value={selectedModel}
                    onChange={setSelectedModel}
                    class={styles.select}
                  >
                    <option value="">Select a model</option>
                    {groupedModels.map(({ context, models }) => (
                      <optgroup key={context} label={`${context} context`}>
                        {models.map(m => (
                          <option key={m.id} value={m.id}>
                            {m.support_tools ? '🔨' : ''}{m.support_thinking ? '🧠' : ''}{m.id} {m.max_length ? `(len: ${m.max_length})` : ''}
                          </option>
                        ))}
                      </optgroup>
                    ))}
                  </select>
                ) : (
                  <p>No models available</p>
                )
              ) : (
                <p>Enter connection details to load models</p>
              )}
            </div>
          </div>
        </div>
        <div class={styles.footer}>
          <button onClick={onClose} class={`${styles.button} ${styles.buttonSecondary}`}>
            Cancel
          </button>
          <button onClick={handleConfirm} class={`${styles.button} ${styles.buttonPrimary}`}>
            Confirm
          </button>
        </div>
      </div>
    </div>
  );
};

View File

@ -1,6 +1,7 @@
import { useAppState, type Location, LocationScale } from "../contexts/state";
import { useState } from "preact/hooks";
import styles from '../assets/location-editor.module.css';
import LLM from "../utils/llm";
const SCALE_OPTIONS = Object.entries(LocationScale)
.filter(([, value]) => typeof value === 'number')
@ -10,8 +11,9 @@ const SCALE_OPTIONS = Object.entries(LocationScale)
}));
export const LocationEditor = () => {
const { currentStory, dispatch } = useAppState();
const { currentStory, dispatch, connection, model } = useAppState();
const [showDeleteConfirm, setShowDeleteConfirm] = useState<string | null>(null);
const [generatingShortDesc, setGeneratingShortDesc] = useState<string | null>(null);
if (!currentStory) {
return null;
@ -48,6 +50,23 @@ export const LocationEditor = () => {
});
};
const handleGenerateShortDescription = async (locationId: string) => {
if (!connection || !model) return;
const location = currentStory.locations.find(l => l.id === locationId);
if (!location || !location.description.trim()) return;
setGeneratingShortDesc(locationId);
try {
const shortDesc = await LLM.summarize(connection, model.id, location.description, 'sentence');
handleEditLocation(locationId, 'shortDescription', shortDesc.trim());
} catch (error) {
console.error('Failed to generate short description:', error);
} finally {
setGeneratingShortDesc(null);
}
};
return (
<div class={styles.locationEditor}>
<div class={styles.header}>
@ -131,8 +150,12 @@ export const LocationEditor = () => {
<div class={styles.field}>
<div class={styles.label}>
Short Description
<button class={styles.generateButton}>
Generate
<button
class={styles.generateButton}
onClick={() => handleGenerateShortDescription(location.id)}
disabled={!location.description.trim() || generatingShortDesc === location.id || !connection || !model}
>
{generatingShortDesc === location.id ? 'Generating...' : 'Generate'}
</button>
</div>
<textarea

View File

@ -1,13 +1,13 @@
import clsx from "clsx";
import { Sidebar } from "./sidebar";
import { ConnectionSettingsModal } from "./connection-settings-modal";
import { SettingsModal } from "./settings-modal";
import { BannedTokensModal } from "./banned-tokens-modal";
import { useAppState } from "../contexts/state";
import { useBool } from "@common/hooks/useBool";
import type { Story } from "../contexts/state";
import styles from '../assets/menu-sidebar.module.css';
import { useState } from "preact/hooks";
import { Pencil, X, Plus, Settings, Ban } from "lucide-preact";
import { Pencil, X, Plus, Plug, Settings } from "lucide-preact";
// ─── Story Item ───────────────────────────────────────────────────────────────
@ -82,8 +82,8 @@ const StoryItem = ({ story, active, onSelect, onRename, onDelete }: StoryItemPro
export const MenuSidebar = () => {
const { stories, currentStory, dispatch } = useAppState();
const isConnectionSettingsOpen = useBool(false);
const isSettingsOpen = useBool(false);
const isBannedTokensOpen = useBool(false);
const handleCreate = () => {
dispatch({ type: 'CREATE_STORY', title: 'New Story' });
@ -124,20 +124,20 @@ export const MenuSidebar = () => {
))}
</div>
<div class={styles.bottomButtons}>
<button class={styles.settingsButton} onClick={isBannedTokensOpen.toggle}>
<Ban size={16} /> Banned Tokens
</button>
<button class={styles.settingsButton} onClick={isSettingsOpen.toggle}>
<Settings size={16} /> Settings
</button>
<button class={styles.settingsButton} onClick={isConnectionSettingsOpen.toggle}>
<Plug size={16} /> Connection Settings
</button>
</div>
</div>
{isBannedTokensOpen.value && (
<BannedTokensModal onClose={isBannedTokensOpen.toggle} />
)}
{isSettingsOpen.value && (
<SettingsModal onClose={isSettingsOpen.toggle} />
)}
{isConnectionSettingsOpen.value && (
<ConnectionSettingsModal onClose={isConnectionSettingsOpen.toggle} />
)}
</Sidebar>
);
};

View File

@ -1,99 +1,61 @@
import { useMemo, useRef } from "preact/hooks";
import clsx from "clsx";
import { useMemo, useState } from "preact/hooks";
import { useQuery } from "@common/hooks/useAsyncState";
import { useInputState } from "@common/hooks/useInputState";
import { useUpdate } from "@common/hooks/useUpdate";
import { useAppState } from "../contexts/state";
import LLM from "../utils/llm";
import styles from "../assets/settings-modal.module.css";
import { X } from "lucide-preact";
import { useInputCallback } from "@common/hooks/useInputCallback";
interface Props {
onClose: () => void;
}
type Tab = "banned-tokens" | "system-instruction";
export const SettingsModal = ({ onClose }: Props) => {
const { connection, model, dispatch } = useAppState();
const [url, setUrl] = useInputState(connection?.url ?? "");
const [apiKey, setApiKey] = useInputState(connection?.apiKey ?? "");
const [selectedModel, setSelectedModel] = useInputState(model?.id ?? "");
const [update, triggerFetch] = useUpdate();
const { bannedTokens, systemInstruction, dispatch } = useAppState();
const [inputValue, setInputValue] = useInputState();
const [activeTab, setActiveTab] = useState<Tab>("banned-tokens");
const urlRef = useRef(url);
const apiKeyRef = useRef(apiKey);
urlRef.current = url;
apiKeyRef.current = apiKey;
const connectionToFetch = useMemo<LLM.Connection | null>(() => {
const currentUrl = urlRef.current;
const currentApiKey = apiKeyRef.current;
if (!currentUrl || !currentApiKey) return null;
return { url: currentUrl, apiKey: currentApiKey };
}, [update]);
const fetchModels = useMemo(() => async (conn: LLM.Connection | null) => {
if (!conn) return [];
const r = await LLM.getModels(conn);
return r.data;
// Save system instruction on every change
const setInstructionValue = useInputCallback((instructionValue) => {
dispatch({
type: "SET_SYSTEM_INSTRUCTION",
systemInstruction: instructionValue,
});
}, []);
const modelsData = useQuery(fetchModels, connectionToFetch);
const isLoadingModels = connectionToFetch != null && modelsData == undefined;
const groupedModels = useMemo(() => {
const sorted = (modelsData ?? []).sort((a, b) => {
const aWeight = Number(a.support_tools) * 2 + Number(a.support_thinking);
const bWeight = Number(b.support_tools) * 2 + Number(b.support_thinking);
if (aWeight !== bWeight) {
return bWeight - aWeight;
}
const aContext = a.max_context ?? 0;
const bContext = b.max_context ?? 0;
if (aContext !== bContext) {
return bContext - aContext;
}
return a.id.localeCompare(b.id);
const handleAdd = () => {
const trimmed = inputValue.trim();
if (trimmed && !bannedTokens.includes(trimmed)) {
dispatch({
type: "SET_BANNED_TOKENS",
tokens: [...bannedTokens, trimmed],
});
// Group by context size
const groups = Map.groupBy(sorted, m => m.max_context ?? 0);
// Convert to array sorted by context size (bigger first)
return Array.from(groups.entries())
.sort((a, b) => b[0] - a[0])
.map(([context, models]) => ({ context, models }));
}, [modelsData]);
const handleBlur = () => {
if (url && apiKey) {
triggerFetch();
setInputValue("");
}
};
const handleRemove = (token: string) => {
dispatch({
type: "SET_BANNED_TOKENS",
tokens: bannedTokens.filter((t) => t !== token),
});
};
const handleKeyDown = (e: KeyboardEvent) => {
if (e.key === 'Enter' && url && apiKey) {
triggerFetch();
if (e.key === "Enter") {
handleAdd();
} else if (e.key === "Escape") {
onClose();
}
};
const handleConfirm = () => {
dispatch({
type: 'SET_CONNECTION',
connection: connectionToFetch,
});
const selectedModelInfo = modelsData?.find(m => m.id === selectedModel) ?? null;
dispatch({
type: 'SET_MODEL',
model: selectedModelInfo,
});
onClose();
};
const connectionToTest = url && apiKey ? { url, apiKey } : null;
const sortedTokens = [...bannedTokens].sort((a, b) =>
a.trim().toLowerCase().localeCompare(b.trim().toLowerCase())
);
return (
<div class={styles.overlay} onClick={onClose}>
@ -104,79 +66,76 @@ export const SettingsModal = ({ onClose }: Props) => {
<X size={20} />
</button>
</div>
<div class={styles.tabs}>
<button
class={clsx(styles.tab, activeTab === "banned-tokens" && styles.active)}
onClick={() => setActiveTab("banned-tokens")}
>
Banned Tokens
</button>
<button
class={clsx(styles.tab, activeTab === "system-instruction" && styles.active)}
onClick={() => setActiveTab("system-instruction")}
>
System Instruction
</button>
</div>
<div class={styles.content}>
<div class={styles.form} autocomplete="off">
<div class={styles.formGroup}>
<label class={styles.label}>
API URL
</label>
{activeTab === "banned-tokens" ? (
<>
<div class={styles.inputRow}>
<input
type="text"
value={url}
onInput={setUrl}
onBlur={handleBlur}
value={inputValue}
onInput={setInputValue}
onKeyDown={handleKeyDown}
placeholder="http://localhost:1234"
placeholder="Token to ban"
class={styles.input}
autocomplete="off"
name="api-url-random"
autoFocus
/>
<button onClick={handleAdd} class={clsx(styles.button, styles.buttonPrimary)}>
Add
</button>
</div>
<div class={styles.formGroup}>
<label class={styles.label}>
API Key
</label>
<input
type="password"
value={apiKey}
onInput={setApiKey}
onBlur={handleBlur}
onKeyDown={handleKeyDown}
placeholder="your-api-key"
class={styles.input}
autocomplete="new-password"
name="api-key-random"
/>
</div>
<div class={styles.formGroup}>
<label class={styles.label}>
Model
</label>
{connectionToTest ? (
isLoadingModels ? (
<p>Loading models...</p>
) : groupedModels.length > 0 ? (
<select
value={selectedModel}
onChange={setSelectedModel}
class={styles.select}
<div class={styles.divider} />
<div class={styles.tokenList}>
{sortedTokens.length === 0 ? (
<p class={styles.emptyText}>No banned tokens</p>
) : (
sortedTokens.map((token) => (
<div key={token} class={styles.tokenItem}>
<span>{token}</span>
<button
onClick={() => handleRemove(token)}
class={styles.tokenRemoveButton}
>
<option value="">Select a model</option>
{groupedModels.map(({ context, models }) => (
<optgroup key={context} label={`${context} context`}>
{models.map(m => (
<option key={m.id} value={m.id}>
{m.support_tools ? '🔨' : ''}{m.support_thinking ? '🧠' : ''}{m.id} {m.max_length ? `(len: ${m.max_length})` : ''}
</option>
))}
</optgroup>
))}
</select>
) : (
<p>No models available</p>
)
) : (
<p>Enter connection details to load models</p>
<X size={12} />
</button>
</div>
))
)}
</div>
</>
) : (
<div class={styles.form}>
<div class={styles.formGroup}>
<label class={styles.label}>
System Instruction
</label>
<textarea
value={systemInstruction}
onInput={setInstructionValue}
placeholder="Enter system instruction for the AI assistant..."
class={clsx(styles.input, styles.textarea)}
rows={10}
/>
</div>
</div>
)}
</div>
<div class={styles.footer}>
<button onClick={onClose} class={`${styles.button} ${styles.buttonSecondary}`}>
Cancel
</button>
<button onClick={handleConfirm} class={`${styles.button} ${styles.buttonPrimary}`}>
Confirm
<button onClick={onClose} class={clsx(styles.button, styles.buttonSecondary)}>
Done
</button>
</div>
</div>

View File

@ -65,6 +65,7 @@ interface IState {
model: LLM.ModelInfo | null;
enableThinking: boolean;
bannedTokens: string[];
systemInstruction: string;
}
// ─── Actions ─────────────────────────────────────────────────────────────────
@ -74,6 +75,7 @@ type Action =
| { type: 'RENAME_STORY'; id: string; title: string }
| { type: 'EDIT_STORY'; id: string; text: string }
| { type: 'EDIT_LORE'; id: string; lore: string }
| { type: 'SET_SYSTEM_INSTRUCTION'; systemInstruction: string }
| { type: 'SET_CURRENT_TAB'; id: string; tab: Tab }
| { type: 'DELETE_STORY'; id: string }
| { type: 'SELECT_STORY'; id: string }
@ -102,6 +104,7 @@ const DEFAULT_STATE: IState = {
model: null,
enableThinking: false,
bannedTokens: [],
systemInstruction: `You are a creative writing assistant. Help the user develop their story by writing engaging content, maintaining consistency with the established characters, settings, and plot. Follow the user's instructions while staying true to the story's tone and style.`,
};
// ─── Reducer ─────────────────────────────────────────────────────────────────
@ -149,6 +152,12 @@ function reducer(state: IState, action: Action): IState {
),
};
}
case 'SET_SYSTEM_INSTRUCTION': {
return {
...state,
systemInstruction: action.systemInstruction,
};
}
case 'SET_CURRENT_TAB': {
return {
...state,
@ -365,6 +374,7 @@ export interface AppState {
model: LLM.ModelInfo | null;
enableThinking: boolean;
bannedTokens: string[];
systemInstruction: string;
dispatch: (action: Action) => void;
}
@ -384,6 +394,7 @@ export const StateContextProvider = ({ children }: { children?: any }) => {
model: state.model,
enableThinking: state.enableThinking,
bannedTokens: state.bannedTokens ?? [],
systemInstruction: state.systemInstruction ?? '',
dispatch,
}), [state]);

View File

@ -276,6 +276,39 @@ namespace LLM {
/**
 * Issues a non-streaming chat-completion request against the connection's
 * `/v1/chat/completions` endpoint and resolves with the full response body.
 */
export async function generate(connection: Connection, config: ChatCompletionRequest): Promise<ChatCompletionResponse> {
  return request<ChatCompletionResponse>(connection, '/v1/chat/completions', 'POST', config);
}
// Prompt template used by LLM.summarize; the {level} and {text} placeholders
// are filled in at call time.
const SUMMARIZATION_PROMPT = `Summarize the following text concisely while preserving key information and meaning. {level}
Text:
{text}
Provide a clear and coherent summary:`;

// Requested granularity of the summary.
export type SummarizationLevel = 'sentence' | 'paragraph' | 'arbitrary';

// Level-specific instruction spliced into SUMMARIZATION_PROMPT's {level} slot.
const LEVEL_INSTRUCTIONS: Record<SummarizationLevel, string> = {
  sentence: 'Summarize in exactly one sentence.',
  paragraph: 'Summarize in exactly one paragraph (2-4 sentences).',
  arbitrary: 'Summarize in a way you think is appropriate for the text length and complexity.',
};
/**
 * Summarizes `text` with the given model at the requested level of detail.
 *
 * @param connection API connection to use.
 * @param model      Model id to run the summarization with.
 * @param text       Arbitrary user-provided text to summarize.
 * @param level      Summary granularity; defaults to 'arbitrary'.
 * @returns The model's summary, or '' when the response carries no content.
 */
export async function summarize(connection: Connection, model: string, text: string, level: SummarizationLevel = 'arbitrary'): Promise<string> {
  // Fill {level} first so a literal "{level}" inside the user's text is never
  // expanded, and use a replacer *function* for {text}: passing the raw string
  // to String#replace would interpret special replacement patterns such as
  // `$&`, `$$` or `$'` that may legitimately occur in user text.
  const prompt = SUMMARIZATION_PROMPT
    .replace('{level}', LEVEL_INSTRUCTIONS[level])
    .replace('{text}', () => text);
  const response = await generate(connection, {
    model,
    messages: [{
      role: 'user',
      content: prompt,
    }],
    // Low temperature keeps summaries focused and close to deterministic.
    temperature: 0.3,
    max_tokens: 500,
  });
  return response.choices[0]?.message.content ?? '';
}
}
export default LLM;

View File

@ -1,8 +1,89 @@
import LLM from "./llm";
import type { AppState } from "../contexts/state";
import { type AppState, LocationScale } from "../contexts/state";
import { Tools } from "./tools";
namespace Prompt {
/**
 * Renders the current story's characters as a Markdown "## Characters" section.
 * Each character gets a "### name" heading, its short description when one
 * exists (falling back to the full description), and a bullet list of
 * relations. Returns '' when there is no story or it has no characters.
 */
export function formatCharactersMarkdown(state: AppState): string {
  const characters = state.currentStory?.characters;
  if (!characters?.length) {
    return '';
  }
  const out: string[] = ['## Characters\n'];
  for (const c of characters) {
    out.push(`### ${c.name}`);
    // Prefer the condensed description when it has been generated.
    const desc = c.shortDescription || c.description;
    if (desc) {
      out.push(desc);
    }
    if (c.relations?.length) {
      out.push('**Relations:**');
      out.push(...c.relations.map((r) => `- ${r.name}: ${r.relation}`));
    }
    // Blank line separates consecutive character entries.
    out.push('');
  }
  return out.join('\n');
}
export function formatLocationsMarkdown(state: AppState): string {
const { currentStory } = state;
if (!currentStory || !currentStory.locations?.length) {
return '';
}
const lines: string[] = [];
lines.push('## Locations\n');
for (const location of currentStory.locations) {
lines.push(`### ${location.name}`);
const description = location.shortDescription || location.description;
if (description) {
lines.push(description);
}
lines.push(`**Scale:** ${LocationScale[location.scale]}`);
lines.push('');
}
return lines.join('\n');
}
/**
 * Builds the full system prompt: the user's base instruction followed by the
 * story title, then lore, characters and locations (each only when present),
 * all joined by blank lines. Without a current story the bare instruction is
 * returned unchanged.
 */
export function formatSystemPrompt(state: AppState): string {
  const story = state.currentStory;
  if (!story) {
    return state.systemInstruction;
  }
  // Instruction and title are always included; the rest is optional.
  const sections: string[] = [state.systemInstruction, `# ${story.title}`];
  if (story.lore) {
    sections.push('## Lore\n' + story.lore);
  }
  for (const block of [formatCharactersMarkdown(state), formatLocationsMarkdown(state)]) {
    if (block) {
      sections.push(block);
    }
  }
  return sections.join('\n\n');
}
export function compilePrompt(state: AppState, newMessages: LLM.ChatMessage[] = []): LLM.ChatCompletionRequest | null {
const { currentStory, model, enableThinking } = state;
@ -11,7 +92,7 @@ namespace Prompt {
}
const messages: LLM.ChatMessage[] = [
// TODO system prompt
{ role: 'system', content: formatSystemPrompt(state) },
// TODO part of story
...currentStory.chatMessages,
];