# mlflow/deployments/__init__.py
  1  """
  2  Exposes functionality for deploying MLflow models to custom serving tools.
  3  
  4  Note: model deployment to AWS Sagemaker can currently be performed via the
  5  :py:mod:`mlflow.sagemaker` module. Model deployment to Azure can be performed by using the
  6  `azureml library <https://pypi.org/project/azureml-mlflow/>`_.
  7  
  8  MLflow does not currently provide built-in support for any other deployment targets, but support
  9  for custom targets can be installed via third-party plugins. See a list of known plugins
 10  `here <https://mlflow.org/docs/latest/plugins.html#deployment-plugins>`_.
 11  
 12  This page largely focuses on the user-facing deployment APIs. For instructions on implementing
 13  your own plugin for deployment to a custom serving tool, see
 14  `plugin docs <https://mlflow.org/docs/latest/plugins.html#writing-your-own-mlflow-plugins>`_.
 15  """
 16  
 17  import contextlib
 18  import json
 19  
 20  from mlflow.deployments.base import BaseDeploymentClient
 21  from mlflow.deployments.databricks import DatabricksDeploymentClient, DatabricksEndpoint
 22  from mlflow.deployments.interface import get_deploy_client, run_local
 23  from mlflow.deployments.openai import OpenAIDeploymentClient
 24  from mlflow.deployments.utils import get_deployments_target, set_deployments_target
 25  from mlflow.exceptions import MlflowException
 26  from mlflow.protos.databricks_pb2 import INVALID_PARAMETER_VALUE
 27  
 28  with contextlib.suppress(Exception):
 29      # MlflowDeploymentClient depends on optional dependencies and can't be imported
 30      # if they are not installed.
 31      from mlflow.deployments.mlflow import MlflowDeploymentClient
 32  
 33  
 34  class PredictionsResponse(dict):
 35      """
 36      Represents the predictions and metadata returned in response to a scoring request, such as a
 37      REST API request sent to the ``/invocations`` endpoint of an MLflow Model Server.
 38      """
 39  
 40      def get_predictions(self, predictions_format="dataframe", dtype=None):
 41          """Get the predictions returned from the MLflow Model Server in the specified format.
 42  
 43          Args:
 44              predictions_format: The format in which to return the predictions. Either
 45                  ``"dataframe"`` or ``"ndarray"``.
 46              dtype: The NumPy datatype to which to coerce the predictions. Only used when
 47                  the "ndarray" predictions_format is specified.
 48  
 49          Raises:
 50              Exception: If the predictions cannot be represented in the specified format.
 51  
 52          Returns:
 53              The predictions, represented in the specified format.
 54  
 55          """
 56          import numpy as np
 57          import pandas as pd
 58          from pandas.core.dtypes.common import is_list_like
 59  
 60          if predictions_format == "dataframe":
 61              predictions = self["predictions"]
 62              if isinstance(predictions, str):
 63                  return pd.DataFrame(data=[predictions])
 64              if isinstance(predictions, dict) and not any(
 65                  is_list_like(p) and getattr(p, "ndim", 1) == 1 for p in predictions.values()
 66              ):
 67                  return pd.DataFrame(data=predictions, index=[0])
 68              return pd.DataFrame(data=predictions)
 69          elif predictions_format == "ndarray":
 70              return np.array(self["predictions"], dtype)
 71          else:
 72              raise MlflowException(
 73                  f"Unrecognized predictions format: '{predictions_format}'",
 74                  INVALID_PARAMETER_VALUE,
 75              )
 76  
 77      def to_json(self, path=None):
 78          """Get the JSON representation of the MLflow Predictions Response.
 79  
 80          Args:
 81              path: If specified, the JSON representation is written to this file path.
 82  
 83          Returns:
 84              If ``path`` is unspecified, the JSON representation of the MLflow Predictions
 85              Response. Else, None.
 86  
 87          """
 88          if path is not None:
 89              with open(path, "w") as f:
 90                  json.dump(dict(self), f)
 91          else:
 92              return json.dumps(dict(self))
 93  
 94      @classmethod
 95      def from_json(cls, json_str):
 96          try:
 97              parsed_response = json.loads(json_str)
 98          except Exception as e:
 99              raise MlflowException("Predictions response contents are not valid JSON") from e
100          if not isinstance(parsed_response, dict) or "predictions" not in parsed_response:
101              raise MlflowException(
102                  f"Invalid response. Predictions response contents must be a dictionary"
103                  f" containing a 'predictions' field. Instead, received: {parsed_response}"
104              )
105          return PredictionsResponse(parsed_response)
106  
107  
# Public API exported via ``from mlflow.deployments import *``. Note that
# ``MlflowDeploymentClient`` is listed even though its import above is
# best-effort; when its optional dependencies are missing, the name is absent
# from the module namespace.
__all__ = [
    "get_deploy_client",
    "run_local",
    "BaseDeploymentClient",
    "DatabricksDeploymentClient",
    "OpenAIDeploymentClient",
    "DatabricksEndpoint",
    "MlflowDeploymentClient",
    "PredictionsResponse",
    "get_deployments_target",
    "set_deployments_target",
]