# agent-scan/utils/llm_manager.py
  1  # Copyright (c) 2024-2026 Tencent Zhuque Lab. All rights reserved.
  2  #
  3  # Licensed under the Apache License, Version 2.0 (the "License");
  4  # you may not use this file except in compliance with the License.
  5  # You may obtain a copy of the License at
  6  #
  7  #     http://www.apache.org/licenses/LICENSE-2.0
  8  #
  9  # Unless required by applicable law or agreed to in writing, software
 10  # distributed under the License is distributed on an "AS IS" BASIS,
 11  # WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
 12  # See the License for the specific language governing permissions and
 13  # limitations under the License.
 14  #
 15  # Requirement: Any integration or derivative work must explicitly attribute
 16  # Tencent Zhuque Lab (https://github.com/Tencent/AI-Infra-Guard) in its
 17  # documentation or user interface, as detailed in the NOTICE file.
 18  
 19  """
 20  LLM管理器 - 管理多个专用LLM实例
 21  """
 22  from typing import Dict, Optional
 23  from utils.llm import LLM
 24  from utils.logging import logger
 25  from utils import config
 26  
 27  
 28  class LLMManager:
 29      """管理多个专用LLM实例,支持不同用途的模型配置"""
 30      
 31      # 预定义的模型配置(从环境变量读取)
 32      DEFAULT_CONFIGS = {
 33          "default": {
 34              "model": config.DEFAULT_MODEL,
 35              "base_url": config.DEFAULT_BASE_URL,
 36              "description": "默认模型"
 37          },
 38          "thinking": {
 39              "model": config.THINKING_MODEL,
 40              "base_url": config.THINKING_BASE_URL,
 41              "api_key": config.THINKING_API_KEY,  # 可选,为 None 时使用主 API Key
 42              "description": "专门用于深度思考和推理的模型"
 43          },
 44          "coding": {
 45              "model": config.CODING_MODEL,
 46              "base_url": config.CODING_BASE_URL,
 47              "api_key": config.CODING_API_KEY,  # 可选,为 None 时使用主 API Key
 48              "description": "专门用于代码生成和分析的模型"
 49          },
 50          "fast": {
 51              "model": config.FAST_MODEL,
 52              "base_url": config.FAST_BASE_URL,
 53              "api_key": config.FAST_API_KEY,  # 可选,为 None 时使用主 API Key
 54              "description": "用于快速响应的轻量级模型"
 55          },
 56      }
 57      
 58      def __init__(self, api_key: str, base_url: str = None):
 59          """
 60          初始化LLM管理器
 61          
 62          Args:
 63              api_key: 主 API 密钥(作为默认值)
 64              base_url: 主 API 基础URL(作为默认值,如果不提供则使用 config.DEFAULT_BASE_URL)
 65          """
 66          self.default_api_key = api_key
 67          self.default_base_url = base_url or config.DEFAULT_BASE_URL
 68          self._llm_instances: Dict[str, LLM] = {}
 69          self._custom_configs: Dict[str, dict] = {}
 70      
 71      def configure(
 72          self, 
 73          purpose: str, 
 74          model: str, 
 75          temperature: float = 0.7,
 76          base_url: Optional[str] = None,
 77          api_key: Optional[str] = None
 78      ) -> None:
 79          """
 80          配置特定用途的模型
 81          
 82          Args:
 83              purpose: 用途标识(如 "thinking", "coding")
 84              model: 模型名称
 85              temperature: 温度参数
 86              base_url: API 基础 URL(可选,不提供则使用默认)
 87              api_key: API 密钥(可选,不提供则使用默认)
 88          """
 89          self._custom_configs[purpose] = {
 90              "model": model,
 91              "temperature": temperature,
 92              "base_url": base_url,
 93              "api_key": api_key
 94          }
 95          
 96          # 清除已有实例,下次获取时重新创建
 97          if purpose in self._llm_instances:
 98              del self._llm_instances[purpose]
 99          
100          logger.info(f"Configured LLM for purpose '{purpose}': {model}")
101      
102      def get_llm(self, purpose: str = "default") -> Optional[LLM]:
103          """
104          获取指定用途的LLM实例
105          
106          Args:
107              purpose: 用途标识
108              
109          Returns:
110              LLM实例,如果未配置返回None
111          """
112          # 如果已有实例,直接返回
113          if purpose in self._llm_instances:
114              return self._llm_instances[purpose]
115          
116          # 查找配置
117          llm_config = None
118          if purpose in self._custom_configs:
119              llm_config = self._custom_configs[purpose].copy()
120          elif purpose in self.DEFAULT_CONFIGS:
121              llm_config = self.DEFAULT_CONFIGS[purpose].copy()
122              if "temperature" not in llm_config:
123                  llm_config["temperature"] = 0.7
124          
125          if llm_config is None:
126              logger.warning(f"No configuration found for purpose '{purpose}'")
127              return None
128          
129          # 确定使用的 API Key 和 Base URL
130          # 优先级:配置中指定的 > 默认配置中的 > 主配置
131          api_key = llm_config.get("api_key") or self.default_api_key
132          base_url = llm_config.get("base_url") or self.default_base_url
133          
134          # 创建新实例
135          try:
136              llm = LLM(
137                  model=llm_config["model"],
138                  api_key=api_key,
139                  base_url=base_url
140              )
141              llm.temperature = llm_config.get("temperature", 0.7)
142              self._llm_instances[purpose] = llm
143              return llm
144          except Exception as e:
145              logger.error(f"Failed to create LLM for purpose '{purpose}': {e}")
146              return None
147      
148      def get_specialized_llms(self, purposes: list[str] = None) -> Dict[str, LLM]:
149          """
150          批量获取多个专用LLM实例
151          
152          Args:
153              purposes: 用途列表,如果为None则返回所有已配置的
154              
155          Returns:
156              用途到LLM实例的字典
157          """
158          if purposes is None:
159              # 获取所有已配置的
160              purposes = list(set(
161                  list(self._custom_configs.keys()) + 
162                  list(self.DEFAULT_CONFIGS.keys())
163              ))
164          
165          result = {}
166          for purpose in purposes:
167              llm = self.get_llm(purpose)
168              if llm:
169                  result[purpose] = llm
170          
171          return result
172      
173      def list_available_purposes(self) -> list[tuple[str, str]]:
174          """
175          列出所有可用的用途及其描述
176          
177          Returns:
178              (用途, 描述) 的列表
179          """
180          purposes = []
181          
182          # 自定义配置
183          for purpose, llm_config in self._custom_configs.items():
184              model = llm_config['model']
185              base_url = llm_config.get('base_url') or self.default_base_url
186              purposes.append((purpose, f"Custom: {model} @ {base_url}"))
187          
188          # 默认配置
189          for purpose, llm_config in self.DEFAULT_CONFIGS.items():
190              if purpose not in self._custom_configs:
191                  model = llm_config['model']
192                  base_url = llm_config.get('base_url') or self.default_base_url
193                  desc = llm_config["description"]
194                  purposes.append((purpose, f"{desc} ({model} @ {base_url})"))
195          
196          return purposes
197  
# Optional module-level singleton.
_global_manager: Optional[LLMManager] = None


def get_global_manager() -> Optional[LLMManager]:
    """Return the process-wide LLM manager, or None if it was never initialized."""
    return _global_manager
206  
207  
def init_global_manager(api_key: str, base_url: str = "https://openrouter.ai/api/v1") -> LLMManager:
    """Create the global LLM manager and install it as the module singleton.

    Args:
        api_key: API key.
        base_url: API base URL.

    Returns:
        The newly created LLMManager (also stored in ``_global_manager``).
    """
    global _global_manager
    manager = LLMManager(api_key, base_url)
    _global_manager = manager
    return manager
222  
223  
if __name__ == '__main__':
    import os

    # Usage example — requires the OPENROUTER_API_KEY environment variable.
    api_key = os.environ.get("OPENROUTER_API_KEY")
    if not api_key:
        print("请设置环境变量 OPENROUTER_API_KEY")
        exit(1)

    manager = LLMManager(api_key=api_key, base_url="https://openrouter.ai/api/v1")

    # Custom configuration with a dedicated base_url for this purpose.
    manager.configure(
        "thinking",
        "deepseek/deepseek-reasoner",
        temperature=0.3,
        base_url="https://api.deepseek.com/v1",
    )
    # No base_url supplied here, so the manager's default is used.
    manager.configure("coding", "anthropic/claude-3.5-sonnet", temperature=0.5)

    # Fetch purpose-specific LLMs one at a time...
    thinking_llm = manager.get_llm("thinking")
    coding_llm = manager.get_llm("coding")

    # ...or several at once.
    specialized_llms = manager.get_specialized_llms(["thinking", "coding"])

    print("Available purposes:")
    for purpose, desc in manager.list_available_purposes():
        print(f"  - {purpose}: {desc}")
262