# llm.py
 1  """
 2  LLM initialization and utilities.
 3  """
 4  
from typing import Optional

from langchain_community.llms import Ollama

from kamaji.config import load_config
 7  
 8  
 9  def get_llm(temperature: float = None):
10      """
11      Get configured LLM instance.
12  
13      Args:
14          temperature: Override temperature from config
15      """
16      config = load_config()
17  
18      return Ollama(
19          model=config["model"],
20          base_url=config["base_url"],
21          temperature=temperature if temperature is not None else config["temperature"]
22      )