1
0
Fork 0

LLM functions

This commit is contained in:
Pabloader 2026-03-20 21:13:13 +00:00
parent 1edea3dc42
commit 3fd2b60a10
3 changed files with 202 additions and 1 deletion

View File

@ -10,6 +10,8 @@ export const formatError = (error: unknown, message: string = ''): string => {
export const formatErrorMessage = (error: unknown): string => { export const formatErrorMessage = (error: unknown): string => {
if (error && typeof error === 'object' && 'message' in error) { if (error && typeof error === 'object' && 'message' in error) {
return `${error.message}`; return `${error.message}`;
} else if (error && typeof error === 'object' && 'data' in error) {
return `${error.data}`;
} else if (error) { } else if (error) {
return error.toString(); return error.toString();
} else { } else {

View File

@ -7,7 +7,7 @@ export interface ISSEOptions {
start?: boolean; start?: boolean;
} }
interface SSEEvent extends Event { export interface SSEEvent extends Event {
id?: string; id?: string;
source?: SSE; source?: SSE;
readyState?: number; readyState?: number;
@ -267,4 +267,8 @@ export default class SSE {
this.xhr = null; this.xhr = null;
this._setReadyState(SSE.CLOSED); this._setReadyState(SSE.CLOSED);
}; };
[Symbol.dispose]() {
this.close();
}
} }

View File

@ -0,0 +1,195 @@
import { formatError } from '@common/errors';
import SSE, { type SSEEvent } from '@common/sse';
namespace LLM {
  /** Endpoint and credentials for an OpenAI-compatible API server. */
  export interface Connection {
    url: string;
    apiKey: string;
  }

  /** One message in a chat transcript. */
  export interface ChatMessage {
    role: 'system' | 'user' | 'assistant';
    content: string;
  }

  /** Request body for `/v1/chat/completions`. */
  export interface ChatCompletionRequest {
    model: string;
    messages: ChatMessage[];
    temperature?: number;
    max_tokens?: number;
    stop?: string | string[];
    banned_tokens?: string[];
    top_p?: number;
    frequency_penalty?: number;
    presence_penalty?: number;
  }

  /** A single completion candidate in a non-streaming response. */
  export interface ChatCompletionChoice {
    index: number;
    message: ChatMessage;
    finish_reason: 'stop' | 'length' | 'content_filter';
  }

  /** Full (non-streaming) response of `/v1/chat/completions`. */
  export interface ChatCompletionResponse {
    id: string;
    object: 'chat.completion';
    created: number;
    model: string;
    choices: ChatCompletionChoice[];
    usage: {
      prompt_tokens: number;
      completion_tokens: number;
      total_tokens: number;
      // NOTE(review): non-standard field — presumably an AI Horde style
      // kudos cost; confirm against the target server.
      spent_kudos?: number;
    };
  }

  /** Incremental delta for one choice in a streamed response. */
  export interface ChatCompletionChunkChoice {
    index: number;
    delta: { role?: string; content?: string };
    finish_reason: 'stop' | 'length' | 'content_filter' | null;
  }

  /** One SSE chunk of a streamed `/v1/chat/completions` response. */
  export interface ChatCompletionChunk {
    id: string;
    object: 'chat.completion.chunk';
    created: number;
    model: string;
    choices: ChatCompletionChunkChoice[];
  }

  /** One entry of the `/v1/models` listing. */
  export interface ModelInfo {
    id: string;
    object: 'model';
    created: number;
    owned_by: string;
    max_context?: number;
    max_length?: number;
  }

  /** Response of `/v1/models`. */
  export interface ModelsResponse {
    object: 'list';
    data: ModelInfo[];
  }

  /** Request body for `/v1/responses/input_tokens`. */
  export interface CountTokensRequest {
    model: string;
    input: string | ChatMessage[];
  }

  /** Response of `/v1/responses/input_tokens`. */
  export interface CountTokensResponse {
    object: 'response.input_tokens';
    input_tokens: number;
  }

  /** Builds the JSON + bearer-auth headers shared by every API call. */
  function buildHeaders(connection: Connection): Record<string, string> {
    return {
      'Content-Type': 'application/json',
      'Authorization': `Bearer ${connection.apiKey}`,
    };
  }

  /**
   * Resolves an API path against the connection's base URL.
   *
   * NOTE(review): assigning `pathname` discards any base path already present
   * in `connection.url` (e.g. "https://host/api" + "/v1/models" yields
   * "/v1/models", not "/api/v1/models") — confirm this is intended for all
   * configured servers.
   */
  function buildUrl(connection: Connection, path: string): URL {
    const url = new URL(connection.url);
    url.pathname = path;
    return url;
  }

  /**
   * Performs a single JSON request against the API.
   *
   * @param connection server URL and API key
   * @param path absolute API path (e.g. "/v1/models")
   * @param method HTTP method, defaults to GET
   * @param body optional payload, JSON-serialized when present
   * @returns the parsed JSON response body
   * @throws Error when the HTTP status is not ok
   */
  async function request<T>(connection: Connection, path: string, method: string = 'GET', body?: unknown): Promise<T> {
    const response = await fetch(buildUrl(connection, path), {
      method,
      headers: buildHeaders(connection),
      body: body ? JSON.stringify(body) : undefined,
    });
    if (!response.ok) {
      throw new Error(`HTTP error! status: ${response.status}`);
    }
    return response.json();
  }

  /**
   * Performs an SSE request and yields each JSON-parsed event payload.
   *
   * Listeners are attached once and events are buffered in a queue. (The
   * previous implementation re-registered listeners on every loop iteration,
   * so any event fired while the consumer was processing a yielded value —
   * i.e. while the generator was suspended at `yield` — was silently dropped.)
   *
   * The stream ends when the connection closes, is aborted, or the server
   * sends the OpenAI-style "[DONE]" sentinel.
   *
   * @throws Error (via formatError) when the SSE connection reports an error
   */
  async function* streamRequest<T>(connection: Connection, path: string, method: string = 'GET', body?: unknown): AsyncGenerator<T> {
    // `using` guarantees the connection is closed whether the generator
    // completes, throws, or is abandoned by its consumer.
    using sse = new SSE(buildUrl(connection, path).toString(), {
      headers: buildHeaders(connection),
      method,
      payload: body ? JSON.stringify(body) : undefined,
    });

    // Queue of received events; `null` is the end-of-stream marker.
    const queue: ({ data: string } | null)[] = [];
    let failure: Error | null = null;
    let wake: (() => void) | null = null;

    const push = (item: { data: string } | null) => {
      queue.push(item);
      wake?.();
      wake = null;
    };

    sse.addEventListener('message', (e: SSEEvent) => {
      push(isMessageEvent(e) ? e : null);
    });
    sse.addEventListener('error', (e: SSEEvent) => {
      failure = new Error(formatError(e, 'SSE connection error'));
      wake?.();
      wake = null;
    });
    sse.addEventListener('abort', () => push(null));
    sse.addEventListener('readystatechange', (e: SSEEvent) => {
      if (e != null && typeof e === 'object' && 'readyState' in e && e.readyState === SSE.CLOSED) {
        push(null);
      }
    });

    while (true) {
      if (failure) {
        throw failure;
      }
      if (queue.length === 0) {
        // Park until a listener pushes an event or records a failure,
        // then loop back to re-check both.
        await new Promise<void>((resolve) => {
          wake = resolve;
        });
        continue;
      }
      const event = queue.shift();
      if (!event || event.data === '[DONE]') {
        break;
      }
      if (event.data) {
        try {
          yield JSON.parse(event.data);
        } catch (err) {
          // Skip malformed chunks instead of killing the whole stream.
          console.error('Failed to parse SSE data:', event.data, err);
        }
      }
    }
  }

  /** Narrows an arbitrary event to one carrying a string `data` payload. */
  function isMessageEvent(e: unknown): e is { data: string } {
    return e != null && typeof e === 'object' && 'data' in e && typeof e.data === 'string';
  }

  /** Lists the models offered by the server. */
  export async function getModels(connection: Connection): Promise<ModelsResponse> {
    return request<ModelsResponse>(connection, '/v1/models');
  }

  /** Counts input tokens for a prompt or message list. */
  export async function countTokens(connection: Connection, body: CountTokensRequest): Promise<CountTokensResponse> {
    return request<CountTokensResponse>(connection, '/v1/responses/input_tokens', 'POST', body);
  }

  /** Runs a chat completion with streaming, yielding chunks as they arrive. */
  export async function* generateStream(connection: Connection, config: ChatCompletionRequest): AsyncGenerator<ChatCompletionChunk> {
    yield* streamRequest<ChatCompletionChunk>(connection, '/v1/chat/completions', 'POST', {
      ...config,
      stream: true,
    });
  }

  /** Runs a chat completion and resolves with the full response. */
  export async function generate(connection: Connection, config: ChatCompletionRequest): Promise<ChatCompletionResponse> {
    return request<ChatCompletionResponse>(connection, '/v1/chat/completions', 'POST', config);
  }
}
export default LLM;