// app/src/types/llm.ts
/** Role of the author of a chat {@link Message}. */
export type MessageRole = 'system' | 'user' | 'assistant';
 2  
/** A single chat message exchanged with an LLM. */
export interface Message {
  /** Who authored this message. */
  role: MessageRole;
  /** Plain-text body of the message. */
  content: string;
}
 7  
/** One incremental piece of a streamed LLM response. */
export interface StreamChunk {
  /** Text carried by this chunk. */
  content: string;
  /** True when the stream has finished and no further chunks follow. */
  done: boolean;
  /** Error description when the stream failed; absent on success. */
  error?: string;
}
13  
/**
 * Payload for a completion request sent to an LLM backend.
 *
 * Field names use snake_case (`max_tokens`) — presumably to mirror the
 * wire format of the backend API; keep them in sync with the server.
 */
export interface LLMRequest {
  /** Ordered conversation history to complete. */
  messages: Message[];
  /** Model identifier; backend default is used when omitted. */
  model?: string;
  /** Sampling temperature; valid range is provider-specific. */
  temperature?: number;
  /** Upper bound on tokens to generate; backend default when omitted. */
  max_tokens?: number;
  /** Request a streamed response ({@link StreamChunk}s) instead of a single reply. */
  stream?: boolean;
}
21  
/** A complete (non-streamed) reply from an LLM backend. */
export interface LLMResponse {
  /** Generated text of the reply. */
  content: string;
  /** Model that produced the reply, when reported by the backend. */
  model?: string;
  /** Token accounting for the request, when reported by the backend. */
  usage?: Usage;
}
27  
/** Token accounting for a single LLM call (snake_case mirrors the wire format). */
export interface Usage {
  /** Tokens consumed by the input/prompt. */
  prompt_tokens: number;
  /** Tokens produced in the completion. */
  completion_tokens: number;
  /** Sum of prompt and completion tokens. */
  total_tokens: number;
}
33  
/** Health/configuration snapshot for one LLM provider. */
export interface ProviderStatus {
  /** Display or lookup name of the provider. */
  name: string;
  /** Whether the provider is currently reachable/usable. */
  available: boolean;
  /** Whether required configuration (e.g. credentials) is present. */
  configured: boolean;
  /** Explanation when the provider is unusable; absent otherwise. */
  error?: string;
}
40  
/** Catalog entry describing a selectable model. */
export interface ModelInfo {
  /** Identifier passed in {@link LLMRequest.model}. */
  id: string;
  /** Human-readable display name. */
  name: string;
  /** Provider that serves this model — NOTE(review): looks like it should be a {@link Provider} value; confirm before narrowing the type. */
  provider: string;
}
46  
/** Identifiers of the supported LLM backends. */
export type Provider = 'anthropic' | 'openai' | 'ollama' | 'lm_studio';