# tests/llama_index/sample_code/with_model_config.py
 1  """
 2  Sample code to define a chat engine and save it with model config (dictionary).
 3  """
 4  
 5  from llama_index.core import Document, VectorStoreIndex
 6  from llama_index.core.chat_engine.types import ChatMode
 7  from llama_index.llms.openai import OpenAI
 8  
 9  import mlflow
10  
11  model_config = mlflow.models.ModelConfig()
12  model_name = model_config.get("model_name")
13  temperature = model_config.get("temperature")
14  llm = OpenAI(model=model_name, temperature=temperature)
15  
16  index = VectorStoreIndex.from_documents(documents=[Document.example()])
17  # Setting SIMPLE chat mode will create a SimpleChatEngine instance
18  chat_engine = index.as_chat_engine(llm=llm, chat_mode=ChatMode.SIMPLE)
19  
20  mlflow.models.set_model(chat_engine)