// providerConfig.js
/**
 * Declarative form configuration for every supported LLM provider.
 *
 * Keys are provider identifiers; each value describes how to render a
 * settings form for that provider:
 *   - `label`       {string}  Human-readable provider name.
 *   - `description` {string}  One-line summary shown in the provider picker.
 *   - `fields`      {Array<object>} Form field descriptors, each with:
 *       - `name`        {string}  Config key sent to the backend.
 *       - `label`       {string}  Field label shown in the UI.
 *       - `type`        {'text'|'password'|'number'|'boolean'} Input widget type.
 *       - `required`    {boolean} Whether the field must be filled in.
 *       - `placeholder` {string}  [optional] Hint text for empty inputs.
 *       - `default`     {*}       [optional] Initial value for the field.
 *       - `step`        {number}  [optional] Increment for number inputs.
 *
 * NOTE(review): several `api_key` fields are optional because the backend
 * presumably falls back to the environment variable named in the
 * placeholder text — verify against the backend provider loaders.
 */
export const PROVIDER_CONFIG = {
  Ollama: {
    label: "Ollama",
    description: "Run open-source models locally with Ollama",
    fields: [
      { name: "model", label: "Model", type: "text", required: true, placeholder: "e.g. llama3.1" },
      { name: "base_url", label: "Base URL", type: "text", required: false, placeholder: "http://localhost:11434", default: "" },
      { name: "temperature", label: "Temperature", type: "number", required: false, default: 0.1, step: 0.1 },
      { name: "keep_alive", label: "Keep Alive (seconds)", type: "number", required: false, default: 0 },
      { name: "request_timeout", label: "Request Timeout (seconds)", type: "number", required: false, default: 120 },
    ],
  },
  OllamaMultiModal: {
    label: "Ollama MultiModal",
    description: "Vision models via Ollama (LLaVA, etc.)",
    fields: [
      { name: "model", label: "Model", type: "text", required: true, placeholder: "e.g. llava:13b" },
      { name: "base_url", label: "Base URL", type: "text", required: false, placeholder: "http://localhost:11434", default: "" },
      { name: "temperature", label: "Temperature", type: "number", required: false, default: 0.1, step: 0.1 },
      { name: "keep_alive", label: "Keep Alive (seconds)", type: "number", required: false, default: 0 },
      { name: "request_timeout", label: "Request Timeout (seconds)", type: "number", required: false, default: 120 },
    ],
  },
  OpenAI: {
    label: "OpenAI",
    description: "GPT-4o, GPT-4o Mini, and other OpenAI models",
    fields: [
      { name: "model", label: "Model", type: "text", required: true, placeholder: "e.g. gpt-4o" },
      { name: "api_key", label: "API Key", type: "password", required: false, placeholder: "Uses OPENAI_API_KEY env var if empty" },
      { name: "temperature", label: "Temperature", type: "number", required: false, default: 0, step: 0.1 },
    ],
  },
  OpenAILike: {
    label: "OpenAI-Compatible",
    description: "Any provider with an OpenAI-compatible API",
    fields: [
      { name: "model", label: "Model", type: "text", required: true, placeholder: "e.g. my-model" },
      { name: "api_base", label: "API Base URL", type: "text", required: true, placeholder: "https://api.example.com/v1" },
      { name: "api_key", label: "API Key", type: "password", required: false, placeholder: "API key if required" },
      { name: "temperature", label: "Temperature", type: "number", required: false, default: 0, step: 0.1 },
      { name: "is_chat_model", label: "Is Chat Model", type: "boolean", required: false, default: true },
    ],
  },
  Anthropic: {
    label: "Anthropic",
    description: "Claude models from Anthropic",
    fields: [
      { name: "model", label: "Model", type: "text", required: true, placeholder: "e.g. claude-sonnet-4-20250514" },
      { name: "api_key", label: "API Key", type: "password", required: false, placeholder: "Uses ANTHROPIC_API_KEY env var if empty" },
      { name: "temperature", label: "Temperature", type: "number", required: false, default: 0, step: 0.1 },
      { name: "max_tokens", label: "Max Tokens", type: "number", required: false, default: 4096 },
    ],
  },
  Grok: {
    label: "Grok (xAI)",
    description: "Grok models from xAI",
    fields: [
      // Only provider whose `model` is optional — it has a usable default.
      { name: "model", label: "Model", type: "text", required: false, default: "grok-beta", placeholder: "e.g. grok-beta" },
      { name: "api_key", label: "API Key", type: "password", required: false, placeholder: "Uses XAI_API_KEY env var if empty" },
      { name: "temperature", label: "Temperature", type: "number", required: false, default: 0, step: 0.1 },
    ],
  },
  LiteLLM: {
    label: "LiteLLM",
    description: "Unified API proxy for 100+ LLM providers",
    fields: [
      { name: "model", label: "Model", type: "text", required: true, placeholder: "e.g. openai/gpt-4o" },
      { name: "api_base", label: "API Base URL", type: "text", required: false, placeholder: "https://litellm.example.com" },
      { name: "api_key", label: "API Key", type: "password", required: false, placeholder: "API key if required" },
      { name: "temperature", label: "Temperature", type: "number", required: false, default: 0, step: 0.1 },
    ],
  },
  vLLM: {
    label: "vLLM",
    description: "High-throughput serving with vLLM",
    fields: [
      { name: "model", label: "Model", type: "text", required: true, placeholder: "e.g. meta-llama/Llama-3.1-8B" },
      { name: "api_url", label: "API URL", type: "text", required: true, placeholder: "http://localhost:8000" },
      { name: "temperature", label: "Temperature", type: "number", required: false, default: 0, step: 0.1 },
      { name: "max_tokens", label: "Max Tokens", type: "number", required: false, default: 4096 },
    ],
  },
  Gemini: {
    label: "Google Gemini",
    description: "Google Gemini text models",
    fields: [
      { name: "model", label: "Model", type: "text", required: true, placeholder: "e.g. models/gemini-2.0-flash" },
      { name: "api_key", label: "API Key", type: "password", required: false, placeholder: "Uses GOOGLE_API_KEY env var if empty" },
      { name: "temperature", label: "Temperature", type: "number", required: false, default: 0, step: 0.1 },
    ],
  },
  GeminiMultiModal: {
    label: "Gemini MultiModal",
    description: "Google Gemini with vision capabilities",
    fields: [
      { name: "model", label: "Model", type: "text", required: true, placeholder: "e.g. models/gemini-2.0-flash" },
      { name: "api_key", label: "API Key", type: "password", required: false, placeholder: "Uses GOOGLE_API_KEY env var if empty" },
      { name: "temperature", label: "Temperature", type: "number", required: false, default: 0, step: 0.1 },
    ],
  },
  AzureOpenAI: {
    label: "Azure OpenAI",
    description: "OpenAI models hosted on Microsoft Azure",
    fields: [
      { name: "model", label: "Model / Deployment Name", type: "text", required: true, placeholder: "e.g. gpt-4o" },
      { name: "engine", label: "Engine (Deployment ID)", type: "text", required: true, placeholder: "e.g. my-gpt4o-deployment" },
      { name: "azure_endpoint", label: "Azure Endpoint", type: "text", required: true, placeholder: "https://my-resource.openai.azure.com/" },
      { name: "api_key", label: "API Key", type: "password", required: false, placeholder: "Uses AZURE_OPENAI_API_KEY env var if empty" },
      { name: "api_version", label: "API Version", type: "text", required: false, default: "2024-02-15-preview", placeholder: "e.g. 2024-02-15-preview" },
      { name: "temperature", label: "Temperature", type: "number", required: false, default: 0, step: 0.1 },
    ],
  },
  Bedrock: {
    label: "Amazon Bedrock",
    description: "Access Claude, Llama, Mistral, and more via AWS Bedrock",
    fields: [
      { name: "model", label: "Model ID", type: "text", required: true, placeholder: "e.g. anthropic.claude-3-sonnet-20240229-v1:0" },
      { name: "region_name", label: "AWS Region", type: "text", required: false, placeholder: "e.g. us-east-1", default: "us-east-1" },
      { name: "aws_access_key_id", label: "AWS Access Key ID", type: "password", required: false, placeholder: "Uses AWS_ACCESS_KEY_ID env var if empty" },
      { name: "aws_secret_access_key", label: "AWS Secret Access Key", type: "password", required: false, placeholder: "Uses AWS_SECRET_ACCESS_KEY env var if empty" },
      { name: "temperature", label: "Temperature", type: "number", required: false, default: 0, step: 0.1 },
      { name: "max_tokens", label: "Max Tokens", type: "number", required: false, default: 4096 },
    ],
  },
};