
Continue mode for chat

Pabloader 2026-04-10 16:01:21 +00:00
parent 7148254b35
commit eaa79c6c49
4 changed files with 85 additions and 68 deletions

View File

@@ -43,18 +43,6 @@
 .messageActions {
   display: flex;
   gap: 4px;
-  opacity: 0;
-  transition: opacity 0.15s ease;
 }
-
-.message:hover .messageActions {
-  opacity: 1;
-}
-
-@media (max-width: 1000px) {
-  .messageActions {
-    opacity: 1;
-  }
-}

 .iconButton {

View File

@@ -115,17 +115,17 @@ export const ChatPanel = ({ visible }: { visible: boolean }) => {
     const countTokens = async () => {
       try {
-        const messages: ChatMessage[] = [];
+        const newMessages: ChatMessage[] = [];
         if (input.trim()) {
-          messages.push({
+          newMessages.push({
             id: crypto.randomUUID(),
             role: 'user',
             content: input.trim(),
           });
         }
-        const chatRequest = Prompt.compilePrompt(appStateRef.current, messages);
+        const chatRequest = Prompt.compilePrompt(appStateRef.current, { newMessages });
         const countRequest: LLM.CountTokensRequest = {
           model: model.id,
           input: chatRequest?.messages ?? [],
@@ -148,14 +148,14 @@ export const ChatPanel = ({ visible }: { visible: boolean }) => {
     return () => clearTimeout(timeoutId);
   }, [currentStory, connection, model, input, currentStory?.chatMessages.length]);

-  const sendMessage = useCallback(async (
-    newMessages: Iterable<ChatMessage>,
-    excludedMessageIds: string[] = [],
-  ) => {
+  const sendMessage = useCallback(async (config: Prompt.CompileConfig = {}) => {
     if (!currentStory || !currentWorld || !connection || !model) return;
+    const { newMessages = [], excludedMessageIds = [] } = config;
+    const excludedSet = new Set(excludedMessageIds);

     for (const message of newMessages) {
-      if (excludedMessageIds.includes(message.id)) continue;
+      if (excludedSet.has(message.id)) continue;
       dispatch({
         type: 'ADD_CHAT_MESSAGE',
         worldId: currentWorld.id,
@@ -164,20 +164,25 @@ export const ChatPanel = ({ visible }: { visible: boolean }) => {
       });
     }

-    const assistantMessageId = crypto.randomUUID();
-    dispatch({
-      type: 'ADD_CHAT_MESSAGE',
-      worldId: currentWorld.id,
-      storyId: currentStory.id,
-      message: {
-        id: assistantMessageId,
-        role: 'assistant',
-        content: '',
-        reasoning_content: 'Generating...',
-      },
-    });
+    const continuedMessage = config.continueLast ? currentStory.chatMessages.at(-1) : null;
+    const targetMessageId = continuedMessage?.id ?? crypto.randomUUID();
+    const targetRole = continuedMessage?.role ?? 'assistant';
-    const request = Prompt.compilePrompt(appStateRef.current, newMessages, excludedMessageIds);
+    if (!continuedMessage) {
+      dispatch({
+        type: 'ADD_CHAT_MESSAGE',
+        worldId: currentWorld.id,
+        storyId: currentStory.id,
+        message: {
+          id: targetMessageId,
+          role: 'assistant',
+          content: '',
+          reasoning_content: 'Generating...',
+        },
+      });
+    }
+    const request = Prompt.compilePrompt(appStateRef.current, config);
     if (!request) {
       setError('Failed to compile prompt');
@@ -188,8 +193,8 @@ export const ChatPanel = ({ visible }: { visible: boolean }) => {
     try {
       const charName = currentWorld.title ?? 'Assistant';
       const prefix = `${charName}: `;
-      let accumulatedContent = '';
-      let accumulatedReasoning = '';
+      let accumulatedContent = continuedMessage?.content ?? '';
+      let accumulatedReasoning = continuedMessage?.role === 'assistant' ? continuedMessage.reasoning_content ?? '' : '';
       let tool_calls: LLM.ToolCall[] | undefined;

       for await (const chunk of LLM.generateStream(connection, request)) {
@ -219,8 +224,8 @@ export const ChatPanel = ({ visible }: { visible: boolean }) => {
worldId: currentWorld.id,
storyId: currentStory.id,
message: {
id: assistantMessageId,
role: 'assistant',
id: targetMessageId,
role: targetRole,
content: accumulatedContent,
reasoning_content: accumulatedReasoning,
tool_calls,
@@ -228,9 +233,9 @@ export const ChatPanel = ({ visible }: { visible: boolean }) => {
         });
       }
     }

-    const assistantMessage: ChatMessage = {
-      id: assistantMessageId,
-      role: 'assistant',
+    const finalMessage: ChatMessage = {
+      id: targetMessageId,
+      role: targetRole,
       content: accumulatedContent,
       reasoning_content: accumulatedReasoning,
       tool_calls,
@@ -239,7 +244,7 @@ export const ChatPanel = ({ visible }: { visible: boolean }) => {
       type: 'ADD_CHAT_MESSAGE',
       worldId: currentWorld.id,
       storyId: currentStory.id,
-      message: assistantMessage,
+      message: finalMessage,
     });

     if (tool_calls) {
@@ -265,7 +270,13 @@ export const ChatPanel = ({ visible }: { visible: boolean }) => {
       }

       if (!abortControllerRef.current?.signal.aborted) {
-        return sendMessage([...newMessages, assistantMessage, ...toolMessages]);
+        return sendMessage({
+          newMessages: [
+            ...newMessages,
+            finalMessage,
+            ...toolMessages,
+          ]
+        });
       }
     }
   } catch (err) {
@@ -288,7 +299,7 @@ export const ChatPanel = ({ visible }: { visible: boolean }) => {
     setError(null);
     abortControllerRef.current = new AbortController();
-    const excludedMessages: string[] = [];
+    const excludedMessageIds = new Set<string>();

     try {
       if (isAssistant) {
         // Delete the last assistant message and regenerate
@@ -298,9 +309,9 @@ export const ChatPanel = ({ visible }: { visible: boolean }) => {
           storyId: currentStory.id,
           messageId: lastMessage.id,
         });
-        excludedMessages.push(lastMessage.id);
+        excludedMessageIds.add(lastMessage.id);
       }
-      await sendMessage([], excludedMessages);
+      await sendMessage({ excludedMessageIds });
     } finally {
       setIsLoading(false);
     }
@@ -322,11 +333,13 @@ export const ChatPanel = ({ visible }: { visible: boolean }) => {
     abortControllerRef.current = new AbortController();

     try {
-      await sendMessage([{
-        id: crypto.randomUUID(),
-        role: 'user' as const,
-        content: input.trim(),
-      }]);
+      await sendMessage({
+        newMessages: [{
+          id: crypto.randomUUID(),
+          role: 'user' as const,
+          content: input.trim(),
+        }]
+      });
     } finally {
       setIsLoading(false);
     }
@@ -341,15 +354,21 @@ export const ChatPanel = ({ visible }: { visible: boolean }) => {
     abortControllerRef.current = new AbortController();

     try {
-      await sendMessage([{
-        id: crypto.randomUUID(),
-        role: 'user' as const,
-        content: (continuePrompt + '\n\n' + input).trim(),
-      }]);
+      if (currentWorld?.chatOnly) {
+        await sendMessage({ continueLast: true });
+      } else {
+        await sendMessage({
+          newMessages: [{
+            id: crypto.randomUUID(),
+            role: 'user' as const,
+            content: (continuePrompt + '\n\n' + input).trim(),
+          }]
+        });
+      }
     } finally {
       setIsLoading(false);
     }
-  }, [currentStory, input, connection, model, isLoading, sendMessage]);
+  }, [currentStory, currentWorld, input, connection, model, isLoading, sendMessage]);
const handleStopGeneration = useCallback(() => {
abortControllerRef.current?.abort();
@@ -599,16 +618,14 @@ export const ChatPanel = ({ visible }: { visible: boolean }) => {
               <Sparkles size={14} />
             </button>
           )}
-          {!currentWorld?.chatOnly && (
-            <button
-              class={styles.actionButton}
-              onClick={handleContinue}
-              disabled={isDisabled}
-              title="Continue"
-            >
-              <ChevronsRight size={14} />
-            </button>
-          )}
+          <button
+            class={styles.actionButton}
+            onClick={handleContinue}
+            disabled={isDisabled}
+            title="Continue"
+          >
+            <ChevronsRight size={14} />
+          </button>
           <button
             class={styles.actionButton}
             onClick={handleRegenerate}
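
Taken together, every call site in this file now passes sendMessage a single Prompt.CompileConfig object instead of positional arguments. A minimal recap of the three call shapes above, as a sketch using only names that appear in this diff:

// New user message: appended to the story, then compiled into the prompt.
sendMessage({
  newMessages: [{ id: crypto.randomUUID(), role: 'user', content: input.trim() }],
});

// handleRegenerate: the deleted assistant message is excluded from compilation.
sendMessage({ excludedMessageIds: new Set([lastMessage.id]) });

// handleContinue in chat-only worlds: resume the last message in place
// instead of appending a fresh assistant placeholder.
sendMessage({ continueLast: true });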

View File

@@ -72,6 +72,7 @@ namespace LLM {
       max_tokens?: number;
     };
     add_generation_prompt?: boolean;
+    remove_last_eos?: boolean;
   }

   export interface ChatCompletionChoice {

View File

@@ -346,12 +346,22 @@ namespace Prompt {
     return parts.join('\n\n');
   }

+  export interface CompileConfig {
+    newMessages?: Iterable<ChatMessage>;
+    excludedMessageIds?: Iterable<string>;
+    continueLast?: boolean;
+  }
+
   export function compilePrompt(
     state: AppState,
-    newMessages: Iterable<ChatMessage> = [],
-    excludedMessageIds: Iterable<string> = [],
+    config: CompileConfig = {},
   ): LLM.ChatCompletionRequest | null {
     const { currentStory, model, enableThinking, currentWorld } = state;
+    const {
+      newMessages = [],
+      excludedMessageIds = [],
+      continueLast = false,
+    } = config;

     if (!currentStory || !model) {
       return null;
@@ -397,6 +407,7 @@ namespace Prompt {
       messages: applyVars(formattedMessages),
       max_tokens: model.top_provider.max_completion_tokens || 2048,
       banned_tokens: state.bannedTokens,
+      ...(continueLast && { remove_last_eos: true }),
     };
   }
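
End to end, continueLast keeps the story's last message as the generation target in ChatPanel and makes compilePrompt set the new remove_last_eos flag; the field name suggests the backend drops the trailing end-of-sequence token so the model resumes that message rather than opening a new assistant turn, though the diff does not show the backend side. A rough sketch of the difference, where appState and userMessage are hypothetical placeholders:

// Normal turn: no flag, the model starts a new assistant message.
const fresh = Prompt.compilePrompt(appState, { newMessages: [userMessage] });
// fresh?.remove_last_eos -> undefined

// Continue mode: the last message stays in the prompt and the request asks
// the backend to omit its final EOS token (exact behavior is backend-defined).
const continued = Prompt.compilePrompt(appState, { continueLast: true });
// continued?.remove_last_eos -> true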