# model_as_code.py
# This example demonstrates defining a model directly from code.
# This feature allows for defining model logic within a python script, module, or notebook that is stored
# directly as serialized code, as opposed to object serialization that would otherwise occur when saving
# or logging a model object.
# This script defines the model's logic and specifies which class within the file contains the model code.
# The companion example to this, model_as_code_driver.py, is the driver code that performs the logging and
# loading of this model definition.
import os

import pandas as pd

import mlflow
from mlflow import pyfunc

# Fail fast with an explicit exception rather than `assert`, which is
# silently stripped when Python runs with the -O flag and would let the
# script continue without credentials.
if "OPENAI_API_KEY" not in os.environ:
    raise RuntimeError("Please set the OPENAI_API_KEY environment variable.")


class AIModel(pyfunc.PythonModel):
    """A pyfunc model that answers user prompts via the OpenAI chat API.

    Each row of the input is forwarded to the `gpt-4o-mini` chat-completions
    endpoint and the assistant's reply is collected into a DataFrame.
    """

    @mlflow.trace(name="chain", span_type="CHAIN")
    def predict(self, context, model_input):
        """Return one assistant response per input prompt.

        Args:
            context: pyfunc model context (unused here).
            model_input: a pandas DataFrame with an "input" column, or any
                iterable of prompt values (coerced to `str` before sending).

        Returns:
            pandas DataFrame with a single "response" column, one row per
            input prompt.
        """
        if isinstance(model_input, pd.DataFrame):
            model_input = model_input["input"].tolist()

        responses = [
            self.get_open_ai_model_response(str(user_input)).choices[0].message.content
            for user_input in model_input
        ]
        return pd.DataFrame({"response": responses})

    @mlflow.trace(name="open_ai", span_type="LLM")
    def get_open_ai_model_response(self, user_input):
        """Call the OpenAI chat-completions endpoint for a single prompt.

        Args:
            user_input: the user's prompt string.

        Returns:
            The raw OpenAI `ChatCompletion` response object.
        """
        # Imported lazily so this module can be loaded (e.g. for inspection
        # by the model-from-code loader) without requiring the openai
        # package until a prediction is actually made.
        from openai import OpenAI

        return OpenAI().chat.completions.create(
            model="gpt-4o-mini",
            messages=[
                {
                    "role": "system",
                    "content": "You are a helpful assistant. You are here to provide useful information to the user.",
                },
                {
                    "role": "user",
                    "content": user_input,
                },
            ],
        )


# IMPORTANT: The model code needs to call `mlflow.models.set_model()` to set the model,
# which will be loaded back using `mlflow.pyfunc.load_model` for inference.
mlflow.models.set_model(AIModel())