# with_model_config_yaml_file.py
"""
Sample code to define a chat engine and save it with model config (YAML file).
"""

from llama_index.core import Document, VectorStoreIndex
from llama_index.core.chat_engine.types import ChatMode
from llama_index.llms.openai import OpenAI

import mlflow

# Load LLM settings from the YAML-backed model configuration file.
config = mlflow.models.ModelConfig(development_config="model_config.yaml")
llm_settings = config.get("llm")

# Construct the LLM directly from the configured model name and temperature.
llm = OpenAI(
    model=llm_settings.get("model_name"),
    temperature=llm_settings.get("temperature"),
)

# Index the bundled example document.
index = VectorStoreIndex.from_documents(documents=[Document.example()])
# SIMPLE chat mode yields a SimpleChatEngine instance.
chat_engine = index.as_chat_engine(llm=llm, chat_mode=ChatMode.SIMPLE)

# Register the chat engine as the model for MLflow's model-from-code logging.
mlflow.models.set_model(chat_engine)