# config/llm-api-proxy.yaml.example
---
_version: "0.2.0"

proxy:
  host: 0.0.0.0
  port: 8082
  log_level: INFO
  enable_streaming: true

providers:
  anthropic:
    type: anthropic
    base_url: https://api.anthropic.com
    api_key_env: ANTHROPIC_API_KEY
  openai:
    type: openai
    base_url: https://api.openai.com/v1
    api_key_env: OPENAI_API_KEY
  local-openai:
    type: openai-compatible
    base_url: http://host.docker.internal:8033/v1  # Use host.docker.internal to reach host machine
    api_key_env: LOCAL_OPENAI_API_KEY

models:
  "anthropic:opus-4.5-202501122":
    provider: anthropic
    target_model: opus-4.5-202501122
  "openai:gpt-5.2":
    provider: openai
    target_model: gpt-5.2
  "local-openai:myllmmodel-55":
    provider: local-openai
    target_model: myllmmodel-55

routing:
  default_provider: anthropic
  allow_unmapped_models: false