From 3fd2b60a1054a25368f4f6636c8e83d6cd95e626 Mon Sep 17 00:00:00 2001
From: Pabloader
Date: Fri, 20 Mar 2026 21:13:13 +0000
Subject: [PATCH] LLM functions

---
 src/common/errors.ts               |   2 +
 src/common/sse.ts                  |   6 +-
 src/games/storywriter/utils/llm.ts | 222 +++++++++++++++++++++++++++++
 3 files changed, 229 insertions(+), 1 deletion(-)
 create mode 100644 src/games/storywriter/utils/llm.ts

diff --git a/src/common/errors.ts b/src/common/errors.ts
index 5b933b9..5d71883 100644
--- a/src/common/errors.ts
+++ b/src/common/errors.ts
@@ -10,6 +10,8 @@ export const formatError = (error: unknown, message: string = ''): string => {
 export const formatErrorMessage = (error: unknown): string => {
   if (error && typeof error === 'object' && 'message' in error) {
     return `${error.message}`;
+  } else if (error && typeof error === 'object' && 'data' in error) {
+    return `${error.data}`;
   } else if (error) {
     return error.toString();
   } else {
diff --git a/src/common/sse.ts b/src/common/sse.ts
index bf59b51..a9e3b02 100644
--- a/src/common/sse.ts
+++ b/src/common/sse.ts
@@ -7,7 +7,7 @@ export interface ISSEOptions {
   start?: boolean;
 }
 
-interface SSEEvent extends Event {
+export interface SSEEvent extends Event {
   id?: string;
   source?: SSE;
   readyState?: number;
@@ -267,4 +267,8 @@
     this.xhr = null;
     this._setReadyState(SSE.CLOSED);
   };
+
+  [Symbol.dispose]() {
+    this.close();
+  }
 }
\ No newline at end of file
diff --git a/src/games/storywriter/utils/llm.ts b/src/games/storywriter/utils/llm.ts
new file mode 100644
index 0000000..e0f6d45
--- /dev/null
+++ b/src/games/storywriter/utils/llm.ts
@@ -0,0 +1,222 @@
+import { formatError } from '@common/errors';
+import SSE, { type SSEEvent } from '@common/sse';
+
+/**
+ * Client for an OpenAI-compatible chat-completion HTTP API.
+ * Every call targets `Connection.url` and authenticates with a bearer `apiKey`.
+ */
+namespace LLM {
+  export interface Connection {
+    url: string;
+    apiKey: string;
+  }
+
+  export interface ChatMessage {
+    role: 'system' | 'user' | 'assistant';
+    content: string;
+  }
+
+  export interface ChatCompletionRequest {
+    model: string;
+    messages: ChatMessage[];
+    temperature?: number;
+    max_tokens?: number;
+    stop?: string | string[];
+    banned_tokens?: string[];
+    top_p?: number;
+    frequency_penalty?: number;
+    presence_penalty?: number;
+  }
+
+  export interface ChatCompletionChoice {
+    index: number;
+    message: ChatMessage;
+    finish_reason: 'stop' | 'length' | 'content_filter';
+  }
+
+  export interface ChatCompletionResponse {
+    id: string;
+    object: 'chat.completion';
+    created: number;
+    model: string;
+    choices: ChatCompletionChoice[];
+    usage: {
+      prompt_tokens: number;
+      completion_tokens: number;
+      total_tokens: number;
+      spent_kudos?: number;
+    };
+  }
+
+  export interface ChatCompletionChunkChoice {
+    index: number;
+    delta: { role?: string; content?: string };
+    finish_reason: 'stop' | 'length' | 'content_filter' | null;
+  }
+
+  export interface ChatCompletionChunk {
+    id: string;
+    object: 'chat.completion.chunk';
+    created: number;
+    model: string;
+    choices: ChatCompletionChunkChoice[];
+  }
+
+  export interface ModelInfo {
+    id: string;
+    object: 'model';
+    created: number;
+    owned_by: string;
+    max_context?: number;
+    max_length?: number;
+  }
+
+  export interface ModelsResponse {
+    object: 'list';
+    data: ModelInfo[];
+  }
+
+  export interface CountTokensRequest {
+    model: string;
+    input: string | ChatMessage[];
+  }
+
+  export interface CountTokensResponse {
+    object: 'response.input_tokens';
+    input_tokens: number;
+  }
+
+  /**
+   * Sends a JSON request to `path` on the connection and returns the parsed
+   * JSON response. Throws an Error on any non-2xx HTTP status.
+   */
+  async function request<T>(connection: Connection, path: string, method: string = 'GET', body?: unknown): Promise<T> {
+    const headers = {
+      'Content-Type': 'application/json',
+      'Authorization': `Bearer ${connection.apiKey}`,
+    };
+
+    const url = new URL(connection.url);
+    url.pathname = path;
+
+    const response = await fetch(url, {
+      method,
+      headers,
+      body: body ? JSON.stringify(body) : undefined,
+    });
+    if (!response.ok) {
+      throw new Error(`HTTP error! status: ${response.status}`);
+    }
+    return response.json();
+  }
+
+  /**
+   * Opens an SSE stream to `path` and yields every parsed JSON payload until
+   * the server sends `[DONE]` or the connection closes.
+   *
+   * Listeners are attached once and incoming events are buffered in a queue,
+   * so messages dispatched back-to-back are never dropped between iterations.
+   */
+  async function* streamRequest<T>(connection: Connection, path: string, method: string = 'GET', body?: unknown): AsyncGenerator<T> {
+    const headers = {
+      'Content-Type': 'application/json',
+      'Authorization': `Bearer ${connection.apiKey}`,
+    };
+
+    const url = new URL(connection.url);
+    url.pathname = path;
+
+    using sse = new SSE(url.toString(), {
+      headers,
+      method,
+      payload: body ? JSON.stringify(body) : undefined,
+    });
+
+    // Incoming events are queued so that messages dispatched while the
+    // consumer is busy are never lost. A null item marks end-of-stream and
+    // an { error } item is re-thrown to the consumer.
+    type StreamItem = { data: string } | { error: Error } | null;
+    const queue: StreamItem[] = [];
+    let notify: (() => void) | null = null;
+
+    const push = (item: StreamItem) => {
+      queue.push(item);
+      notify?.();
+      notify = null;
+    };
+
+    const onMessage = (e: SSEEvent) => push(isMessageEvent(e) ? { data: e.data } : null);
+    const onError = (e: SSEEvent) => push({ error: new Error(formatError(e, 'SSE connection error')) });
+    const onAbort = () => push(null);
+    const onReadyStateChange = (e: SSEEvent) => {
+      if (e != null && typeof e === 'object' && 'readyState' in e && e.readyState === SSE.CLOSED) {
+        push(null);
+      }
+    };
+
+    sse.addEventListener('message', onMessage);
+    sse.addEventListener('error', onError);
+    sse.addEventListener('abort', onAbort);
+    sse.addEventListener('readystatechange', onReadyStateChange);
+
+    try {
+      while (true) {
+        while (queue.length === 0) {
+          await new Promise<void>((resolve) => { notify = resolve; });
+        }
+        const item = queue.shift();
+
+        if (!item) {
+          break; // stream closed or aborted
+        }
+        if ('error' in item) {
+          throw item.error;
+        }
+        if (item.data === '[DONE]') {
+          break;
+        }
+        if (item.data) {
+          try {
+            yield JSON.parse(item.data);
+          } catch (err) {
+            console.error('Failed to parse SSE data:', item.data, err);
+          }
+        }
+      }
+    } finally {
+      sse.removeEventListener('message', onMessage);
+      sse.removeEventListener('error', onError);
+      sse.removeEventListener('abort', onAbort);
+      sse.removeEventListener('readystatechange', onReadyStateChange);
+    }
+  }
+
+  function isMessageEvent(e: unknown): e is { data: string } {
+    return e != null && typeof e === 'object' && 'data' in e && typeof e.data === 'string';
+  }
+
+  /** Lists the models available on the server. */
+  export async function getModels(connection: Connection): Promise<ModelsResponse> {
+    return request<ModelsResponse>(connection, '/v1/models');
+  }
+
+  /** Counts the tokens `body.input` would consume for `body.model`. */
+  export async function countTokens(connection: Connection, body: CountTokensRequest): Promise<CountTokensResponse> {
+    return request<CountTokensResponse>(connection, '/v1/responses/input_tokens', 'POST', body);
+  }
+
+  /** Streams chat-completion chunks for `config` (forces `stream: true`). */
+  export async function* generateStream(connection: Connection, config: ChatCompletionRequest): AsyncGenerator<ChatCompletionChunk> {
+    yield* streamRequest<ChatCompletionChunk>(connection, '/v1/chat/completions', 'POST', {
+      ...config,
+      stream: true,
+    });
+  }
+
+  /** Requests a complete (non-streaming) chat completion. */
+  export async function generate(connection: Connection, config: ChatCompletionRequest): Promise<ChatCompletionResponse> {
+    return request<ChatCompletionResponse>(connection, '/v1/chat/completions', 'POST', config);
+  }
+}
+
+export default LLM;
\ No newline at end of file