# examples/openai/azure_openai.py
 1  import openai
 2  import pandas as pd
 3  
 4  import mlflow
 5  
"""
Set these environment variables for the Azure OpenAI service before running:

export OPENAI_API_KEY="<AZURE OPENAI KEY>"
# OPENAI_API_BASE is the endpoint of your Azure OpenAI resource,
# e.g. https://<service-name>.openai.azure.com/
export OPENAI_API_BASE="<AZURE OPENAI BASE>"
# OPENAI_API_VERSION, e.g. 2023-05-15
export OPENAI_API_VERSION="<AZURE OPENAI API VERSION>"
export OPENAI_API_TYPE="azure"
export OPENAI_DEPLOYMENT_NAME="<AZURE OPENAI DEPLOYMENT ID OR NAME>"
"""
17  
# Log the Azure OpenAI chat-completion task as an MLflow model. The prompt
# template contains the variable "{animal}", which is filled in per-row at
# inference time by the pyfunc wrapper.
with mlflow.start_run():
    model_info = mlflow.openai.log_model(
        # Your Azure OpenAI model, e.g. gpt-4o-mini
        model="<YOUR AZURE OPENAI MODEL>",
        task=openai.chat.completions,
        name="model",
        messages=[{"role": "user", "content": "Tell me a joke about {animal}."}],
    )
26  
# Load the model in its native flavor: a plain dict holding the logged
# configuration (model/deployment name, prompt messages, ...).
native_model = mlflow.openai.load_model(model_info.model_uri)
# NOTE: with openai>=1.0 (the `openai.chat.completions` API used above) the
# Azure deployment is passed via `model=` — `deployment_id` is not a valid
# parameter — and the call returns a typed response object, so its fields are
# read with attribute access rather than dict subscripting.
completion = openai.chat.completions.create(
    model=native_model["model"],
    messages=native_model["messages"],
)
print(completion.choices[0].message.content)
34  
35  
# Load the same logged model as a generic pyfunc model. predict() fills the
# "{animal}" prompt variable from each input row and returns the completions.
model = mlflow.pyfunc.load_model(model_info.model_uri)

# Input as a DataFrame: the column name must match the prompt variable.
df = pd.DataFrame({"animal": ["cats", "dogs"]})
print(model.predict(df))

# Input as a list of dicts keyed by the prompt variable.
list_of_dicts = [
    {"animal": "cats"},
    {"animal": "dogs"},
]
print(model.predict(list_of_dicts))
51  
# Input as plain strings: each string is bound to the single prompt variable.
list_of_strings = [
    "cats",
    "dogs",
]
print(model.predict(list_of_strings))

list_of_strings = [
    "Let me hear your thoughts on AI",
    "Let me hear your thoughts on ML",
]
# `model` is already the pyfunc model loaded from model_info.model_uri above;
# re-loading the same URI here was redundant, so it has been removed.
print(model.predict(list_of_strings))