# chat_workflow/llm/__init__.py
 1  import os
 2  from typing import List, Optional
 3  from langchain_core.language_models.chat_models import BaseChatModel
 4  from .factory import LLMFactory
 5  from .capabilities import ModelCapability  # noqa
 6  from .providers import OllamaProvider, OpenAIProvider, AnthropicProvider, XAIProvider, GroqProvider, GoogleProvider
 7  
 8  # Initialize factory
 9  llm_factory = LLMFactory()
10  # Register providers
11  llm_factory.register_provider(
12      "ollama", OllamaProvider(os.getenv("OLLAMA_URL")))
13  llm_factory.register_provider("openai", OpenAIProvider())
14  llm_factory.register_provider("anthropic", AnthropicProvider())
15  llm_factory.register_provider("xai", XAIProvider())
16  llm_factory.register_provider("groq", GroqProvider())
17  llm_factory.register_provider("google", GoogleProvider())