# mlflow/cli/__init__.py
   1  import contextlib
   2  import json
   3  import logging
   4  import os
   5  import re
   6  import sys
   7  import warnings
   8  from datetime import timedelta
   9  from pathlib import Path
  10  
  11  import click
  12  from click import UsageError
  13  from click.core import ParameterSource
  14  from dotenv import load_dotenv
  15  
  16  import mlflow.db
  17  import mlflow.deployments.cli
  18  import mlflow.experiments
  19  import mlflow.runs
  20  import mlflow.store.artifact.cli
  21  from mlflow import ai_commands, projects, version
  22  from mlflow.entities import ViewType
  23  from mlflow.entities.lifecycle_stage import LifecycleStage
  24  from mlflow.environment_variables import (
  25      MLFLOW_ENABLE_WORKSPACES,
  26      MLFLOW_EXPERIMENT_ID,
  27      MLFLOW_EXPERIMENT_NAME,
  28      MLFLOW_WORKSPACE,
  29      MLFLOW_WORKSPACE_STORE_URI,
  30  )
  31  from mlflow.exceptions import InvalidUrlException, MlflowException
  32  from mlflow.protos.databricks_pb2 import INVALID_PARAMETER_VALUE, RESOURCE_DOES_NOT_EXIST, ErrorCode
  33  from mlflow.store.artifact.artifact_repository_registry import get_artifact_repository
  34  from mlflow.store.tracking import (
  35      DEFAULT_ARTIFACTS_URI,
  36      DEFAULT_LOCAL_FILE_AND_ARTIFACT_PATH,
  37  )
  38  from mlflow.store.workspace.utils import get_default_workspace_optional
  39  from mlflow.telemetry.events import TrackingServerStartEvent
  40  from mlflow.telemetry.track import _record_event
  41  from mlflow.tracking import _get_store
  42  from mlflow.tracking._tracking_service.utils import (
  43      _get_default_tracking_uri,
  44      is_tracking_uri_set,
  45      set_tracking_uri,
  46  )
  47  from mlflow.tracking._workspace.registry import get_workspace_store
  48  from mlflow.utils import cli_args, workspace_context
  49  from mlflow.utils.logging_utils import eprint
  50  from mlflow.utils.os import is_windows
  51  from mlflow.utils.plugins import get_entry_points
  52  from mlflow.utils.process import ShellCommandException
  53  from mlflow.utils.server_cli_utils import (
  54      artifacts_only_config_validation,
  55      assert_server_workspace_env_unset,
  56      resolve_default_artifact_root,
  57  )
  58  from mlflow.utils.workspace_utils import resolve_workspace_store_uri
  59  
  60  _logger = logging.getLogger(__name__)
  61  
  62  
  63  class AliasedGroup(click.Group):
  64      def get_command(self, ctx, cmd_name):
  65          # `mlflow ui` is an alias for `mlflow server`
  66          cmd_name = "server" if cmd_name == "ui" else cmd_name
  67          return super().get_command(ctx, cmd_name)
  68  
  69  
  70  def _load_env_file(ctx: click.Context, param: click.Parameter, value: str | None) -> str | None:
  71      """
  72      Click callback to load environment variables from a dotenv file.
  73  
  74      This function is designed to be used as an eager callback for the --env-file option,
  75      ensuring that environment variables are loaded before any command execution.
  76      """
  77      if value is not None:
  78          env_path = Path(value)
  79          if not env_path.exists():
  80              raise click.BadParameter(f"Environment file '{value}' does not exist.")
  81  
  82          # Load the environment file
  83          # override=False means existing environment variables take precedence
  84          load_dotenv(env_path, override=False)
  85  
  86          # Log that we've loaded the env file (using click.echo for CLI output)
  87          click.echo(f"Loaded environment variables from: {value}")
  88  
  89      return value
  90  
  91  
@click.group(cls=AliasedGroup)
@click.version_option(version=version.VERSION)
@click.option(
    "--env-file",
    type=click.Path(exists=False),
    callback=_load_env_file,
    expose_value=True,
    # Eager so the dotenv file is loaded before any other option/command
    # processing can read the environment.
    is_eager=True,
    help="Load environment variables from a dotenv file before executing the command. "
    "Variables in the file will be loaded but won't override existing environment variables.",
)
def cli(env_file):
    """Root MLflow command group; subcommands attach via ``@cli.command()``."""
    pass
 105  
 106  
@cli.command()
@click.argument("uri")
@click.option(
    "--entry-point",
    "-e",
    metavar="NAME",
    default="main",
    help="Entry point within project. [default: main]. If the entry point is not found, "
    "attempts to run the project file with the specified name as a script, "
    "using 'python' to run .py files and the default shell (specified by "
    "environment variable $SHELL) to run .sh files",
)
@click.option(
    "--version",
    "-v",
    metavar="VERSION",
    help="Version of the project to run, as a Git commit reference for Git projects.",
)
@click.option(
    "--param-list",
    "-P",
    metavar="NAME=VALUE",
    multiple=True,
    help="A parameter for the run, of the form -P name=value. Provided parameters that "
    "are not in the list of parameters for an entry point will be passed to the "
    "corresponding entry point as command-line arguments in the form `--name value`",
)
@click.option(
    "--docker-args",
    "-A",
    metavar="NAME=VALUE",
    multiple=True,
    help="A `docker run` argument or flag, of the form -A name=value (e.g. -A gpus=all) "
    "or -A name (e.g. -A t). The argument will then be passed as "
    "`docker run --name value` or `docker run --name` respectively. ",
)
@click.option(
    "--experiment-name",
    envvar=MLFLOW_EXPERIMENT_NAME.name,
    help="Name of the experiment under which to launch the run. If not "
    "specified, 'experiment-id' option will be used to launch run.",
)
@click.option(
    "--experiment-id",
    envvar=MLFLOW_EXPERIMENT_ID.name,
    type=click.STRING,
    help="ID of the experiment under which to launch the run.",
)
# TODO: Add tracking server argument once we have it working.
@click.option(
    "--backend",
    "-b",
    metavar="BACKEND",
    default="local",
    help="Execution backend to use for run. Supported values: 'local', 'databricks', "
    "kubernetes (experimental). Defaults to 'local'. If running against "
    "Databricks, will run against a Databricks workspace determined as follows: "
    "if a Databricks tracking URI of the form 'databricks://profile' has been set "
    "(e.g. by setting the MLFLOW_TRACKING_URI environment variable), will run "
    "against the workspace specified by <profile>. Otherwise, runs against the "
    "workspace specified by the default Databricks CLI profile. See "
    "https://github.com/databricks/databricks-cli for more info on configuring a "
    "Databricks CLI profile.",
)
@click.option(
    "--backend-config",
    "-c",
    metavar="FILE",
    help="Path to JSON file (must end in '.json') or JSON string which will be passed "
    "as config to the backend. The exact content which should be "
    "provided is different for each execution backend and is documented "
    "at https://www.mlflow.org/docs/latest/projects.html.",
)
@cli_args.ENV_MANAGER_PROJECTS
@click.option(
    "--storage-dir",
    envvar="MLFLOW_TMP_DIR",
    help="Only valid when ``backend`` is local. "
    "MLflow downloads artifacts from distributed URIs passed to parameters of "
    "type 'path' to subdirectories of storage_dir.",
)
@click.option(
    "--run-id",
    metavar="RUN_ID",
    help="If specified, the given run ID will be used instead of creating a new run. "
    "Note: this argument is used internally by the MLflow project APIs "
    "and should not be specified.",
)
@click.option(
    "--run-name",
    metavar="RUN_NAME",
    help="The name to give the MLflow Run associated with the project execution. If not specified, "
    "the MLflow Run name is left unset.",
)
@click.option(
    "--build-image",
    is_flag=True,
    default=False,
    show_default=True,
    help=(
        "Only valid for Docker projects. If specified, build a new Docker image that's based on "
        "the image specified by the `image` field in the MLproject file, and contains files in the "
        "project directory."
    ),
)
def run(
    uri,
    entry_point,
    version,
    param_list,
    docker_args,
    experiment_name,
    experiment_id,
    backend,
    backend_config,
    env_manager,
    storage_dir,
    run_id,
    run_name,
    build_image,
):
    """
    Run an MLflow project from the given URI.

    For local runs, the run will block until it completes.
    Otherwise, the project will run asynchronously.

    If running locally (the default), the URI can be either a Git repository URI or a local path.
    If running on Databricks, the URI must be a Git repository.

    By default, Git projects run in a new working directory with the given parameters, while
    local projects run from the project's root directory.
    """
    # The two experiment selectors are mutually exclusive; reject both up front.
    if experiment_id is not None and experiment_name is not None:
        raise click.UsageError("Specify only one of 'experiment-name' or 'experiment-id' options.")

    # Parse repeated -P name=value entries; -A additionally allows bare flags.
    param_dict = _user_args_to_dict(param_list)
    args_dict = _user_args_to_dict(docker_args, argument_type="A")

    # --backend-config accepts either a path to a .json file or an inline JSON
    # string. Anything without a .json extension is treated as inline JSON and
    # parsed here; the parsed dict (not the raw string) is passed downstream.
    if backend_config is not None and os.path.splitext(backend_config)[-1] != ".json":
        try:
            backend_config = json.loads(backend_config)
        except ValueError as e:
            raise click.UsageError(f"Invalid backend config JSON. Parse error: {e}") from e
    if backend == "kubernetes":
        # Kubernetes runs cannot proceed without an explicit backend config.
        if backend_config is None:
            raise click.UsageError("Specify 'backend_config' when using kubernetes mode.")
    try:
        projects.run(
            uri,
            entry_point,
            version,
            experiment_name=experiment_name,
            experiment_id=experiment_id,
            parameters=param_dict,
            docker_args=args_dict,
            backend=backend,
            backend_config=backend_config,
            env_manager=env_manager,
            storage_dir=storage_dir,
            # Local and kubernetes runs block until completion; other backends
            # (e.g. databricks) launch asynchronously.
            synchronous=backend in ("local", "kubernetes") or backend is None,
            run_id=run_id,
            run_name=run_name,
            build_image=build_image,
        )
    except projects.ExecutionException as e:
        # Log and exit non-zero rather than surfacing a traceback to the CLI user.
        _logger.error("=== %s ===", e)
        sys.exit(1)
 276  
 277  def _user_args_to_dict(arguments, argument_type="P"):
 278      user_dict = {}
 279      for arg in arguments:
 280          split = arg.split("=", maxsplit=1)
 281          # Docker arguments such as `t` don't require a value -> set to True if specified
 282          if len(split) == 1 and argument_type == "A":
 283              name = split[0]
 284              value = True
 285          elif len(split) == 2:
 286              name = split[0]
 287              value = split[1]
 288          else:
 289              raise click.UsageError(
 290                  f"Invalid format for -{argument_type} parameter: '{arg}'. "
 291                  f"Use -{argument_type} name=value."
 292              )
 293          if name in user_dict:
 294              raise click.UsageError(f"Repeated parameter: '{name}'")
 295          user_dict[name] = value
 296      return user_dict
 297  
 298  
 299  def _validate_server_args(
 300      ctx=None,
 301      gunicorn_opts=None,
 302      workers=None,
 303      waitress_opts=None,
 304      uvicorn_opts=None,
 305      allowed_hosts=None,
 306      cors_allowed_origins=None,
 307      x_frame_options=None,
 308      disable_security_middleware=None,
 309  ):
 310      if sys.platform == "win32":
 311          if gunicorn_opts is not None:
 312              raise NotImplementedError(
 313                  "gunicorn is not supported on Windows, cannot specify --gunicorn-opts"
 314              )
 315  
 316      num_server_opts_specified = sum(
 317          1 for opt in [gunicorn_opts, waitress_opts, uvicorn_opts] if opt is not None
 318      )
 319      if num_server_opts_specified > 1:
 320          raise click.UsageError(
 321              "Cannot specify multiple server options. Choose one of: "
 322              "'--gunicorn-opts', '--waitress-opts', or '--uvicorn-opts'."
 323          )
 324  
 325      using_flask_only = gunicorn_opts is not None or waitress_opts is not None
 326      # NB: Only check for security params that are explicitly passed via CLI (not env vars)
 327      # This allows Docker containers to set env vars while using gunicorn
 328      from click.core import ParameterSource
 329  
 330      security_params_specified = False
 331      if ctx:
 332          security_params_specified = any([
 333              ctx.get_parameter_source("allowed_hosts") == ParameterSource.COMMANDLINE,
 334              ctx.get_parameter_source("cors_allowed_origins") == ParameterSource.COMMANDLINE,
 335              (
 336                  ctx.get_parameter_source("disable_security_middleware")
 337                  == ParameterSource.COMMANDLINE
 338              ),
 339          ])
 340  
 341      if using_flask_only and security_params_specified:
 342          raise click.UsageError(
 343              "Security middleware parameters (--allowed-hosts, --cors-allowed-origins, "
 344              "--disable-security-middleware) are only supported with "
 345              "the default uvicorn server. They cannot be used with --gunicorn-opts or "
 346              "--waitress-opts. To use security features, run without specifying a server "
 347              "option (uses uvicorn by default) or explicitly use --uvicorn-opts."
 348          )
 349  
 350  
 351  def _validate_static_prefix(ctx, param, value):
 352      """
 353      Validate that the static_prefix option starts with a "/" and does not end in a "/".
 354      Conforms to the callback interface of click documented at
 355      http://click.pocoo.org/5/options/#callbacks-for-validation.
 356      """
 357      if value is not None:
 358          if not value.startswith("/"):
 359              raise UsageError("--static-prefix must begin with a '/'.")
 360          if value.endswith("/"):
 361              raise UsageError("--static-prefix should not end with a '/'.")
 362      return value
 363  
 364  
@cli.command()
@click.pass_context
@click.option(
    "--backend-store-uri",
    envvar="MLFLOW_BACKEND_STORE_URI",
    metavar="PATH",
    default=None,
    help="URI to which to persist experiment and run data. Acceptable URIs are "
    "SQLAlchemy-compatible database connection strings "
    "(e.g. 'sqlite:///path/to/file.db') or local filesystem URIs "
    "(e.g. 'file:///absolute/path/to/directory'). By default, data will be logged "
    "to the ./mlruns directory.",
)
@click.option(
    "--registry-store-uri",
    envvar="MLFLOW_REGISTRY_STORE_URI",
    metavar="URI",
    default=None,
    help="URI to which to persist registered models. Acceptable URIs are "
    "SQLAlchemy-compatible database connection strings (e.g. 'sqlite:///path/to/file.db'). "
    "If not specified, `backend-store-uri` is used.",
)
@click.option(
    "--default-artifact-root",
    envvar="MLFLOW_DEFAULT_ARTIFACT_ROOT",
    metavar="URI",
    default=None,
    help="Directory in which to store artifacts for any new experiments created. For tracking "
    "server backends that rely on SQL, this option is required in order to store artifacts. "
    "Note that this flag does not impact already-created experiments with any previous "
    "configuration of an MLflow server instance. "
    f"By default, data will be logged to the {DEFAULT_ARTIFACTS_URI} uri proxy if "
    "the --serve-artifacts option is enabled. Otherwise, the default location will "
    f"be {DEFAULT_LOCAL_FILE_AND_ARTIFACT_PATH}.",
)
@cli_args.SERVE_ARTIFACTS
@click.option(
    "--artifacts-only",
    envvar="MLFLOW_ARTIFACTS_ONLY",
    is_flag=True,
    default=False,
    help="If specified, configures the mlflow server to be used only for proxied artifact serving. "
    "With this mode enabled, functionality of the mlflow tracking service (e.g. run creation, "
    "metric logging, and parameter logging) is disabled. The server will only expose "
    "endpoints for uploading, downloading, and listing artifacts. "
    "Default: False",
)
@cli_args.ARTIFACTS_DESTINATION
@cli_args.HOST
@cli_args.PORT
@cli_args.WORKERS
@cli_args.ALLOWED_HOSTS
@cli_args.CORS_ALLOWED_ORIGINS
@cli_args.DISABLE_SECURITY_MIDDLEWARE
@cli_args.X_FRAME_OPTIONS
@click.option(
    "--static-prefix",
    envvar="MLFLOW_STATIC_PREFIX",
    default=None,
    callback=_validate_static_prefix,
    help="A prefix which will be prepended to the path of all static paths.",
)
@click.option(
    "--gunicorn-opts",
    envvar="MLFLOW_GUNICORN_OPTS",
    default=None,
    help="Additional command line options forwarded to gunicorn processes.",
)
@click.option(
    "--waitress-opts", default=None, help="Additional command line options for waitress-serve."
)
@click.option(
    "--uvicorn-opts",
    envvar="MLFLOW_UVICORN_OPTS",
    default=None,
    help="Additional command line options forwarded to uvicorn processes (used by default).",
)
@click.option(
    "--expose-prometheus",
    envvar="MLFLOW_EXPOSE_PROMETHEUS",
    default=None,
    help="Path to the directory where metrics will be stored. If the directory "
    "doesn't exist, it will be created. "
    "Activate prometheus exporter to expose metrics on /metrics endpoint.",
)
@click.option(
    "--app-name",
    default=None,
    type=click.Choice([e.name for e in get_entry_points("mlflow.app")]),
    show_default=True,
    help=(
        "Application name to be used for the tracking server. "
        "If not specified, 'mlflow.server:app' will be used."
    ),
)
@click.option(
    "--dev",
    is_flag=True,
    default=False,
    show_default=True,
    help=(
        "If enabled, run the server with debug logging and auto-reload. "
        "Should only be used for development purposes. "
        "Cannot be used with '--gunicorn-opts' or '--uvicorn-opts'. "
        "Unsupported on Windows."
    ),
)
@click.option(
    "--secrets-cache-ttl",
    type=click.IntRange(10, 300),
    default=60,
    show_default=True,
    help=(
        "Server-side secrets cache time-to-live in seconds. "
        "Controls how long decrypted secrets are cached in memory (encrypted with AES-GCM-256). "
        "Lower values (10-30s) are more secure but impact performance. "
        "Higher values (120-300s) improve performance but increase exposure window. "
        "Range: 10-300 seconds."
    ),
)
@click.option(
    "--secrets-cache-max-size",
    type=click.IntRange(1, 10000),
    default=1000,
    show_default=True,
    help=(
        "Server-side secrets cache maximum entries. "
        "When exceeded, least recently used entries are evicted. "
        "Range: 1-10000 entries."
    ),
)
@click.option(
    "--workspace-store-uri",
    envvar=MLFLOW_WORKSPACE_STORE_URI.name,
    metavar="URI",
    default=None,
    help=(
        "Workspace provider backend URI used for workspace CRUD APIs and request routing. "
        "When unspecified, defaults to the backend store URI. This only needs to be specified "
        "when using a workspace store plugin leveraging externally managed workspaces (e.g. "
        + "Kubernetes namespaces)."
    ),
)
@click.option(
    "--enable-workspaces/--disable-workspaces",
    default=False,
    show_default=True,
    help="Enable backwards compatible workspaces mode for logical isolation of experiments, "
    + "registered models, and prompts.",
)
def server(
    ctx,
    backend_store_uri,
    registry_store_uri,
    default_artifact_root,
    serve_artifacts,
    artifacts_only,
    artifacts_destination,
    host,
    port,
    workers,
    allowed_hosts,
    cors_allowed_origins,
    disable_security_middleware,
    x_frame_options,
    static_prefix,
    gunicorn_opts,
    waitress_opts,
    expose_prometheus,
    app_name,
    dev,
    uvicorn_opts,
    secrets_cache_ttl,
    secrets_cache_max_size,
    workspace_store_uri,
    enable_workspaces,
):
    """
    Run the MLflow tracking server with built-in security middleware.

    The server listens on http://localhost:5000 by default and only accepts connections
    from the local machine. To let the server accept connections from other machines, you will need
    to pass ``--host 0.0.0.0`` to listen on all network interfaces
    (or a specific interface address).

    See https://mlflow.org/docs/latest/tracking/server-security.html for detailed documentation
    and guidance on security configurations for the MLflow tracking server.
    """
    # Imported lazily so that `mlflow run` and other subcommands don't pay the
    # cost of importing the server stack.
    from mlflow.server import _run_server
    from mlflow.server.handlers import initialize_backend_stores

    # Get env_file from parent context (the --env-file option lives on the
    # root `cli` group, not on this subcommand).
    env_file = ctx.parent.params.get("env_file") if ctx.parent else None

    # --dev rewrites uvicorn_opts, so its compatibility checks must happen
    # before _validate_server_args sees the final option values.
    if dev:
        if is_windows():
            raise click.UsageError("'--dev' is not supported on Windows.")
        if gunicorn_opts:
            raise click.UsageError("'--dev' and '--gunicorn-opts' cannot be specified together.")
        if uvicorn_opts:
            raise click.UsageError("'--dev' and '--uvicorn-opts' cannot be specified together.")
        if app_name:
            raise click.UsageError(
                "'--dev' cannot be used with '--app-name'. Development mode with auto-reload "
                "is only supported for the default MLflow tracking server."
            )

        uvicorn_opts = "--reload --log-level debug"

    _validate_server_args(
        ctx=ctx,
        gunicorn_opts=gunicorn_opts,
        workers=workers,
        waitress_opts=waitress_opts,
        uvicorn_opts=uvicorn_opts,
        allowed_hosts=allowed_hosts,
        cors_allowed_origins=cors_allowed_origins,
        x_frame_options=x_frame_options,
        disable_security_middleware=disable_security_middleware,
    )

    # click treats any non-empty env var as "set" for flag options, which would interpret
    # MLFLOW_ENABLE_WORKSPACES="false" as True. If the flag wasn't set explicitly and
    # resolved to False, fall back to the env var parser to preserve "false"/"0".
    if (
        ctx
        and not enable_workspaces
        and ctx.get_parameter_source("enable_workspaces") != ParameterSource.COMMANDLINE
    ):
        enable_workspaces = MLFLOW_ENABLE_WORKSPACES.get()

    assert_server_workspace_env_unset()

    # Keep environment flag in sync with the resolved boolean so server-side gating
    # (which reads MLFLOW_ENABLE_WORKSPACES.get()) has a single source of truth.
    os.environ[MLFLOW_ENABLE_WORKSPACES.name] = "true" if enable_workspaces else "false"
    if enable_workspaces and workspace_store_uri:
        os.environ[MLFLOW_WORKSPACE_STORE_URI.name] = workspace_store_uri
    elif workspace_store_uri:
        # Workspace store URI is meaningless without workspaces; warn instead
        # of silently dropping the option.
        click.echo(
            "Ignoring --workspace-store-uri because workspaces are not enabled. "
            "Use --enable-workspaces to activate workspace mode.",
            err=True,
        )

    # Security configuration is communicated to the (possibly re-exec'd) server
    # process via environment variables rather than function arguments.
    if disable_security_middleware:
        os.environ["MLFLOW_SERVER_DISABLE_SECURITY_MIDDLEWARE"] = "true"
    else:
        if allowed_hosts:
            os.environ["MLFLOW_SERVER_ALLOWED_HOSTS"] = allowed_hosts
            if allowed_hosts == "*":
                click.echo(
                    "WARNING: Accepting ALL hosts. "
                    "This may leave the server vulnerable to DNS rebinding attacks."
                )

        if cors_allowed_origins:
            os.environ["MLFLOW_SERVER_CORS_ALLOWED_ORIGINS"] = cors_allowed_origins
            if cors_allowed_origins == "*":
                click.echo(
                    "WARNING: Allowing ALL origins for CORS. "
                    "This allows ANY website to access your MLflow data. "
                    "This configuration is only recommended for local development."
                )

        if x_frame_options:
            os.environ["MLFLOW_SERVER_X_FRAME_OPTIONS"] = x_frame_options

    # Resolve store URIs: registry defaults to the tracking backend store.
    if not backend_store_uri:
        backend_store_uri = _get_default_tracking_uri()
        click.echo(f"Backend store URI not provided. Using {backend_store_uri}")

    if not registry_store_uri:
        registry_store_uri = backend_store_uri
        click.echo("Registry store URI not provided. Using backend store URI.")

    default_artifact_root = resolve_default_artifact_root(
        serve_artifacts, default_artifact_root, backend_store_uri
    )
    artifacts_only_config_validation(artifacts_only, backend_store_uri, enable_workspaces)

    # In artifacts-only mode the tracking stores are never used, so skip
    # initializing them entirely.
    if not artifacts_only:
        try:
            initialize_backend_stores(
                backend_store_uri,
                registry_store_uri,
                default_artifact_root,
                workspace_store_uri=workspace_store_uri,
            )
        except Exception as e:
            _logger.error("Error initializing backend store")
            _logger.exception(e)
            sys.exit(1)

    # Print a one-line summary of the effective security posture to stderr.
    if disable_security_middleware:
        click.echo(
            "[MLflow] WARNING: Security middleware is DISABLED. "
            "Your MLflow server is vulnerable to various attacks.",
            err=True,
        )
    elif not allowed_hosts and not cors_allowed_origins:
        click.echo(
            "[MLflow] Security middleware enabled with default settings (localhost-only). "
            "To allow connections from other hosts, use --host 0.0.0.0 and configure "
            "--allowed-hosts and --cors-allowed-origins.",
            err=True,
        )
    else:
        # Show at most the first three hosts/origins, then a "+N more" note.
        parts = ["[MLflow] Security middleware enabled"]
        if allowed_hosts:
            hosts_list = allowed_hosts.split(",")[:3]
            if len(allowed_hosts.split(",")) > 3:
                hosts_list.append(f"and {len(allowed_hosts.split(',')) - 3} more")
            parts.append(f"Allowed hosts: {', '.join(hosts_list)}")
        if cors_allowed_origins:
            origins_list = cors_allowed_origins.split(",")[:3]
            if len(cors_allowed_origins.split(",")) > 3:
                origins_list.append(f"and {len(cors_allowed_origins.split(',')) - 3} more")
            parts.append(f"CORS origins: {', '.join(origins_list)}")
        click.echo(". ".join(parts) + ".", err=True)

    # Best-effort telemetry; `parse` may return None, hence the `or {}`.
    _record_event(
        TrackingServerStartEvent,
        TrackingServerStartEvent.parse({
            "backend_store_uri": backend_store_uri,
            "serve_artifacts": serve_artifacts,
            "artifacts_only": artifacts_only,
            "expose_prometheus": expose_prometheus,
            "app_name": app_name,
            "enable_workspaces": enable_workspaces,
            "workers": workers,
            "dev": dev,
        })
        or {},
    )

    try:
        _run_server(
            file_store_path=backend_store_uri,
            registry_store_uri=registry_store_uri,
            default_artifact_root=default_artifact_root,
            serve_artifacts=serve_artifacts,
            artifacts_only=artifacts_only,
            artifacts_destination=artifacts_destination,
            host=host,
            port=port,
            static_prefix=static_prefix,
            workers=workers,
            gunicorn_opts=gunicorn_opts,
            waitress_opts=waitress_opts,
            expose_prometheus=expose_prometheus,
            app_name=app_name,
            uvicorn_opts=uvicorn_opts,
            env_file=env_file,
            secrets_cache_ttl=secrets_cache_ttl,
            secrets_cache_max_size=secrets_cache_max_size,
        )
    except ShellCommandException:
        # The underlying server process already printed its own error output.
        eprint("Running the mlflow server failed. Please see the logs above for details.")
        sys.exit(1)
 726  
 727  def _gc_tracking_resources(
 728      backend_store,
 729      run_ids: list[str] | None,
 730      experiment_ids: list[str] | None,
 731      logged_model_ids: list[str] | None,
 732      older_than: str | None,
 733      time_delta: int,
 734      skip_experiments: bool,
 735      skip_logged_models: bool,
 736      ignore_not_found: bool = False,
 737  ):
 738      """
 739      Perform garbage collection of tracking resources (runs, experiments, logged models).
 740  
 741      This is the core implementation of the gc command, extracted to support workspace iteration.
 742  
 743      Args:
 744          backend_store: The tracking store instance.
 745          run_ids: Optional list of specific run IDs to delete.
 746          experiment_ids: Optional list of specific experiment IDs to delete.
 747          logged_model_ids: Optional list of specific logged model IDs to delete.
 748          older_than: Original older_than string for error messages.
 749          time_delta: Time delta in milliseconds for age filtering.
 750          skip_experiments: Whether to skip experiment deletion.
 751          skip_logged_models: Whether to skip logged model deletion.
 752          ignore_not_found: If True, skip RESOURCE_DOES_NOT_EXIST errors for explicit IDs
 753              that may not exist (e.g., when iterating over multiple workspaces).
 754      """
 755      from mlflow.utils.time import get_current_time_millis
 756  
 757      deleted_run_ids_older_than = backend_store._get_deleted_runs(older_than=time_delta)
 758      run_ids_to_delete = run_ids if run_ids is not None else list(deleted_run_ids_older_than)
 759  
 760      deleted_logged_model_ids = (
 761          backend_store._get_deleted_logged_models() if not skip_logged_models else []
 762      )
 763  
 764      deleted_logged_model_ids_older_than = (
 765          backend_store._get_deleted_logged_models(older_than=time_delta)
 766          if not skip_logged_models
 767          else []
 768      )
 769      logged_model_ids_to_delete = (
 770          logged_model_ids
 771          if logged_model_ids is not None
 772          else list(deleted_logged_model_ids_older_than)
 773      )
 774  
 775      time_threshold = get_current_time_millis() - time_delta
 776      experiment_ids_to_delete = []
 777      if not skip_experiments:
 778          if experiment_ids:
 779              experiments = []
 780              for exp_id in experiment_ids:
 781                  try:
 782                      experiments.append(backend_store.get_experiment(exp_id))
 783                  except MlflowException as exc:
 784                      if ignore_not_found and exc.error_code == ErrorCode.Name(
 785                          RESOURCE_DOES_NOT_EXIST
 786                      ):
 787                          continue
 788                      raise
 789  
 790              # Ensure that the specified experiments are soft-deleted
 791              active_experiment_ids = [
 792                  e.experiment_id for e in experiments if e.lifecycle_stage != LifecycleStage.DELETED
 793              ]
 794              if active_experiment_ids:
 795                  raise MlflowException(
 796                      f"Experiments {active_experiment_ids} are not in the deleted lifecycle stage. "
 797                      "Only experiments in the deleted lifecycle stage can be hard-deleted.",
 798                      error_code=INVALID_PARAMETER_VALUE,
 799                  )
 800  
 801              # Ensure that the specified experiments are old enough
 802              if older_than:
 803                  non_old_experiment_ids = [
 804                      e.experiment_id
 805                      for e in experiments
 806                      if e.last_update_time is None or e.last_update_time >= time_threshold
 807                  ]
 808                  if non_old_experiment_ids:
 809                      raise MlflowException(
 810                          f"Experiments {non_old_experiment_ids} are not older than the required "
 811                          f"age. Only experiments older than {older_than} can be deleted.",
 812                          error_code=INVALID_PARAMETER_VALUE,
 813                      )
 814              experiment_ids_to_delete = list(experiment_ids)
 815          else:
 816              filter_string = f"last_update_time < {time_threshold}" if older_than else None
 817  
 818              def fetch_experiments(token=None):
 819                  page = backend_store.search_experiments(
 820                      view_type=ViewType.DELETED_ONLY,
 821                      filter_string=filter_string,
 822                      page_token=token,
 823                  )
 824                  return (page + fetch_experiments(page.token)) if page.token else page
 825  
 826              experiment_ids_to_delete = [exp.experiment_id for exp in fetch_experiments()]
 827  
 828          if experiment_ids_to_delete:
 829  
 830              def fetch_runs(token=None):
 831                  page = backend_store.search_runs(
 832                      experiment_ids=experiment_ids_to_delete,
 833                      filter_string="",
 834                      run_view_type=ViewType.DELETED_ONLY,
 835                      page_token=token,
 836                  )
 837                  return (page + fetch_runs(page.token)) if page.token else page
 838  
 839              run_ids_to_delete.extend([run.info.run_id for run in fetch_runs()])
 840  
 841      for run_id in set(run_ids_to_delete):
 842          try:
 843              run = backend_store.get_run(run_id)
 844          except MlflowException as exc:
 845              if ignore_not_found and exc.error_code == ErrorCode.Name(RESOURCE_DOES_NOT_EXIST):
 846                  continue
 847              raise
 848          if run.info.lifecycle_stage != LifecycleStage.DELETED:
 849              raise MlflowException(
 850                  f"Run {run_id} is not in `deleted` lifecycle stage. Only runs in"
 851                  " `deleted` lifecycle stage can be deleted."
 852              )
 853          # Raise MlflowException if run_id is newer than older_than parameter
 854          if older_than and run_id not in deleted_run_ids_older_than:
 855              raise MlflowException(
 856                  f"Run {run_id} is not older than the required age. "
 857                  f"Only runs older than {older_than} can be deleted.",
 858                  error_code=INVALID_PARAMETER_VALUE,
 859              )
 860          artifact_repo = get_artifact_repository(run.info.artifact_uri)
 861  
 862          try:
 863              artifact_repo.delete_artifacts()
 864          except InvalidUrlException as iue:
 865              click.echo(
 866                  click.style(
 867                      f"An exception {iue!r} was raised during the deletion of a model artifact",
 868                      fg="yellow",
 869                  )
 870              )
 871              click.echo(
 872                  click.style(
 873                      f"Unable to resolve the provided artifact URL: '{artifact_repo}'. "
 874                      "The gc process will continue and bypass artifact deletion. "
 875                      "Please ensure that the artifact exists "
 876                      "and consider manually deleting any unused artifacts. ",
 877                      fg="yellow",
 878                  ),
 879              )
 880  
 881          backend_store._hard_delete_run(run_id)
 882          click.echo(f"Run with ID {run_id} has been permanently deleted.")
 883  
 884      if not skip_logged_models:
 885          for model_id in set(logged_model_ids_to_delete):
 886              # First, check if the model exists (handles non-existent models correctly)
 887              try:
 888                  logged_model = backend_store.get_logged_model(model_id, allow_deleted=True)
 889              except MlflowException as exc:
 890                  if ignore_not_found and exc.error_code == ErrorCode.Name(RESOURCE_DOES_NOT_EXIST):
 891                      continue
 892                  raise
 893              # Model exists - now check if it's in the deleted lifecycle stage
 894              # (never skip active models, even with ignore_not_found)
 895              if model_id not in deleted_logged_model_ids:
 896                  raise MlflowException(
 897                      f"Logged model {model_id} is not in `deleted` lifecycle stage. "
 898                      "Only logged models in `deleted` lifecycle stage can be deleted."
 899                  )
 900              if older_than and model_id not in deleted_logged_model_ids_older_than:
 901                  raise MlflowException(
 902                      f"Logged model {model_id} is not older than the required age. "
 903                      f"Only logged models older than {older_than} can be deleted.",
 904                      error_code=INVALID_PARAMETER_VALUE,
 905                  )
 906              artifact_repo = get_artifact_repository(logged_model.artifact_location)
 907              try:
 908                  artifact_repo.delete_artifacts()
 909              except InvalidUrlException as iue:
 910                  click.echo(
 911                      click.style(
 912                          f"An exception {iue!r} was raised during the deletion of a model artifact",
 913                          fg="yellow",
 914                      )
 915                  )
 916                  click.echo(
 917                      click.style(
 918                          f"Unable to resolve the provided artifact URL: '{artifact_repo}'. "
 919                          "The gc process will continue and bypass artifact deletion. "
 920                          "Please ensure that the artifact exists "
 921                          "and consider manually deleting any unused artifacts. ",
 922                          fg="yellow",
 923                      ),
 924                  )
 925              backend_store._hard_delete_logged_model(model_id)
 926              click.echo(f"Logged model with ID {model_id} has been permanently deleted.")
 927  
 928      if not skip_experiments:
 929          for experiment_id in experiment_ids_to_delete:
 930              backend_store._hard_delete_experiment(experiment_id)
 931              click.echo(f"Experiment with ID {experiment_id} has been permanently deleted.")
 932  
 933  
 934  def _resolve_gc_workspaces(
 935      backend_store,
 936      all_workspaces: bool,
 937      workspace: str | None,
 938      backend_store_uri: str | None,
 939  ) -> list[str | None]:
 940      """
 941      Determine which workspaces to iterate over for garbage collection.
 942  
 943      Args:
 944          backend_store: The tracking store instance.
 945          all_workspaces: If True, return all workspaces from the workspace store.
 946          workspace: If provided, return a single-element list with this workspace.
 947          backend_store_uri: The backend store URI for resolving workspace store.
 948  
 949      Returns:
 950          List of workspace names to iterate over, or [None] for non-workspace mode.
 951      """
 952      supports_workspaces = (
 953          getattr(backend_store, "supports_workspaces", False) and MLFLOW_ENABLE_WORKSPACES.get()
 954      )
 955      if not supports_workspaces:
 956          if all_workspaces or workspace:
 957              raise MlflowException.invalid_parameter_value(
 958                  "Workspace selection flags are only supported when the tracking store "
 959                  "supports workspaces."
 960              )
 961          return [None]
 962  
 963      if all_workspaces:
 964          workspace_store = get_workspace_store(
 965              resolve_workspace_store_uri(tracking_uri=backend_store_uri)
 966          )
 967          workspaces = [ws.name for ws in workspace_store.list_workspaces()]
 968          if not workspaces:
 969              raise MlflowException(
 970                  "No workspaces found. Ensure the workspace provider is configured correctly.",
 971                  error_code=INVALID_PARAMETER_VALUE,
 972              )
 973          return workspaces
 974  
 975      if workspace:
 976          return [workspace]
 977  
 978      workspace_store = get_workspace_store(
 979          resolve_workspace_store_uri(tracking_uri=backend_store_uri)
 980      )
 981      default_workspace, supports_default = get_default_workspace_optional(workspace_store)
 982      if supports_default and default_workspace is not None:
 983          return [default_workspace.name]
 984  
 985      raise MlflowException.invalid_parameter_value(
 986          "Active workspace is required. Configure a default workspace, set MLFLOW_WORKSPACE "
 987          "or use --workspace/--all-workspaces when workspaces are enabled."
 988      )
 989  
 990  
@cli.command(short_help="Permanently delete runs in the `deleted` lifecycle stage.")
@click.option(
    "--older-than",
    default=None,
    help="Optional. Remove run(s) older than the specified time limit. "
    "Specify a string in #d#h#m#s format. Float values are also supported. "
    "For example: --older-than 1d2h3m4s, --older-than 1.2d3h4m5s",
)
@click.option(
    "--backend-store-uri",
    metavar="PATH",
    default=None,
    help="URI of the backend store from which to delete runs. Acceptable URIs are "
    "SQLAlchemy-compatible database connection strings "
    "(e.g. 'sqlite:///path/to/file.db') or local filesystem URIs "
    "(e.g. 'file:///absolute/path/to/directory'). By default, data will be deleted "
    "from the ./mlruns directory.",
)
@click.option(
    "--artifacts-destination",
    envvar="MLFLOW_ARTIFACTS_DESTINATION",
    metavar="URI",
    default=None,
    help=(
        "The base artifact location from which to resolve artifact upload/download/list requests "
        "(e.g. 's3://my-bucket'). This option only applies when the tracking server is configured "
        "to stream artifacts and the experiment's artifact root location is http or "
        "mlflow-artifacts URI. Otherwise, the default artifact location will be used."
    ),
)
@click.option(
    "--run-ids",
    default=None,
    help="Optional comma separated list of runs to be permanently deleted. If run ids"
    " are not specified, data is removed for all runs in the `deleted`"
    " lifecycle stage.",
)
@click.option(
    "--experiment-ids",
    default=None,
    help="Optional comma separated list of experiments to be permanently deleted including "
    "all of their associated runs. If experiment ids are not specified, data is removed for all "
    "experiments in the `deleted` lifecycle stage.",
)
@click.option(
    "--logged-model-ids",
    default=None,
    help="Optional comma separated list of logged model IDs to be permanently deleted."
    " If logged model IDs are not specified, data is removed for all logged models in the `deleted`"
    " lifecycle stage.",
)
@click.option(
    "--jobs",
    is_flag=True,
    default=False,
    help="Enable job cleanup. Without this flag, no jobs will be deleted."
    " When enabled, all jobs are deleted unless filtered by --older-than or --job-ids."
    " This option only works with database backends.",
)
@click.option(
    "--job-ids",
    default=None,
    help="Optional comma separated list of job IDs to be permanently deleted."
    " Can be used with or without --jobs flag."
    " If --older-than is also specified, only jobs matching both filters are deleted.",
)
@click.option(
    "--tracking-uri",
    default=os.environ.get("MLFLOW_TRACKING_URI"),
    help="Tracking URI to use for deleting 'deleted' runs e.g. http://127.0.0.1:8080",
)
@click.option(
    "--workspace",
    envvar=MLFLOW_WORKSPACE.name,
    default=None,
    help=(
        "Target workspace for deletions when workspaces are enabled. Defaults to the active "
        "workspace (MLFLOW_WORKSPACE)."
    ),
)
@click.option(
    "--all-workspaces",
    is_flag=True,
    default=False,
    help="Delete deleted resources across all workspaces (workspace mode only).",
)
@click.pass_context
def gc(
    ctx,
    older_than,
    backend_store_uri,
    artifacts_destination,
    run_ids,
    experiment_ids,
    logged_model_ids,
    jobs,
    job_ids,
    tracking_uri,
    workspace,
    all_workspaces,
):
    """
    Permanently delete runs in the `deleted` lifecycle stage from the specified backend store.
    This command deletes all artifacts and metadata associated with the specified runs.
    If the provided artifact URL is invalid, the artifact deletion will be bypassed,
    and the gc process will continue.

    .. attention::

        If you are running an MLflow tracking server with artifact proxying enabled,
        you **must** set the ``MLFLOW_TRACKING_URI`` environment variable before running
        this command. Otherwise, the ``gc`` command will not be able to resolve
        artifact URIs and will not be able to delete the associated artifacts.

    **What gets deleted:**

    This command permanently removes:

    - **Run metadata**: Parameters, metrics, tags, and all other run information from the
      backend store
    - **Artifacts**: All files stored in the run's artifact location (models, plots, data
      files, etc.)
    - **Experiment metadata**: When deleting experiments, removes the experiment record and
      all associated data
    - **Job records**: When using the --jobs flag, removes historical job records from the
      jobs table

    .. note::

        This command only considers lifecycle stage and the specified deletion criteria.
        It does **not** check for pinned runs, registered models, or tags. Pinning is a
        UI-only feature that has no effect on garbage collection. Runs must be in the
        `deleted` lifecycle stage before they can be permanently deleted.

    **Examples:**

    .. code-block:: bash

        # Delete all runs that have been in the deleted state for more than 30 days
        mlflow gc --older-than 30d

        # Delete specific runs by ID (they must be in deleted state)
        mlflow gc --run-ids 'run1,run2,run3'

        # Delete all runs in specific experiments (experiments must be in deleted state)
        mlflow gc --experiment-ids 'exp1,exp2'

        # Combine criteria: delete runs older than 7 days in specific experiments
        mlflow gc --older-than 7d --experiment-ids 'exp1,exp2'

        # Delete deleted resources across all workspaces
        mlflow gc --all-workspaces --older-than 30d

        # Delete all finalized jobs older than 7 days (requires --jobs flag)
        mlflow gc --jobs --older-than 7d

        # Delete specific jobs by ID
        mlflow gc --job-ids 'job1,job2,job3'

    """
    # Passing --workspace/--all-workspaces implicitly opts into workspace mode; the env
    # var must be set before _get_store so a workspace-aware store is constructed.
    if (workspace or all_workspaces) and not MLFLOW_ENABLE_WORKSPACES.get():
        os.environ[MLFLOW_ENABLE_WORKSPACES.name] = "true"
    backend_store = _get_store(backend_store_uri, artifacts_destination)
    # Only error if --workspace was explicitly provided on CLI (not from env var)
    workspace_from_cli = ctx.get_parameter_source("workspace") == ParameterSource.COMMANDLINE
    if workspace_from_cli and all_workspaces:
        raise UsageError("Cannot use --workspace and --all-workspaces together.")
    # If --all-workspaces is set, ignore workspace from env var
    if all_workspaces:
        workspace = None
    # Capability probing: hard-deleting runs is mandatory; experiments and logged models
    # are skipped with a warning when the backend lacks the corresponding hook.
    skip_experiments = False
    skip_logged_models = False
    if not hasattr(backend_store, "_hard_delete_run"):
        raise MlflowException(
            "This cli can only be used with a backend that allows hard-deleting runs"
        )

    if not hasattr(backend_store, "_hard_delete_experiment"):
        warnings.warn(
            "The specified backend does not allow hard-deleting experiments. Experiments"
            " will be skipped.",
            FutureWarning,
            stacklevel=2,
        )
        skip_experiments = True

    if not hasattr(backend_store, "_hard_delete_logged_model"):
        warnings.warn(
            "The specified backend does not allow hard-deleting logged models. Logged models"
            " will be skipped.",
            FutureWarning,
            stacklevel=2,
        )
        skip_logged_models = True

    # Convert the "#d#h#m#s" duration string into a millisecond delta for age filtering
    # (0 means "no age restriction").
    time_delta = 0

    if older_than is not None:
        regex = re.compile(
            r"^((?P<days>[\.\d]+?)d)?((?P<hours>[\.\d]+?)h)?((?P<minutes>[\.\d]+?)m)"
            r"?((?P<seconds>[\.\d]+?)s)?$"
        )
        parts = regex.match(older_than)
        if parts is None:
            raise MlflowException(
                f"Could not parse any time information from '{older_than}'. "
                "Examples of valid strings: '8h', '2d8h5m20s', '2m4s'",
                error_code=INVALID_PARAMETER_VALUE,
            )
        time_params = {name: float(param) for name, param in parts.groupdict().items() if param}
        time_delta = int(timedelta(**time_params).total_seconds() * 1000)

    if tracking_uri:
        set_tracking_uri(tracking_uri)

    if not is_tracking_uri_set():
        raise MlflowException(
            "Tracking URL is not set. Please set MLFLOW_TRACKING_URI environment variable "
            "or provide --tracking-uri cli option."
        )

    # Parse comma-separated IDs into lists
    run_ids_list = run_ids.split(",") if run_ids else None
    experiment_ids_list = experiment_ids.split(",") if experiment_ids else None
    logged_model_ids_list = logged_model_ids.split(",") if logged_model_ids else None

    # Prepare job cleanup if requested (database backends only)
    job_store = None
    job_ids_list = None
    if jobs or job_ids:
        from mlflow.utils.uri import extract_db_type_from_uri

        store_uri = backend_store_uri or os.environ.get("MLFLOW_BACKEND_STORE_URI")
        try:
            extract_db_type_from_uri(store_uri)
        except MlflowException:
            # Not a database backend - skip job cleanup silently
            pass
        else:
            # Choose the workspace-aware job store variant when workspaces are enabled.
            if MLFLOW_ENABLE_WORKSPACES.get():
                from mlflow.store.jobs.sqlalchemy_workspace_store import (
                    WorkspaceAwareSqlAlchemyJobStore,
                )

                job_store = WorkspaceAwareSqlAlchemyJobStore(store_uri)
            else:
                from mlflow.store.jobs.sqlalchemy_store import SqlAlchemyJobStore

                job_store = SqlAlchemyJobStore(store_uri)
            job_ids_list = job_ids.split(",") if job_ids else None

    # Run the gc core once per resolved workspace; [None] means non-workspace mode
    # and uses a no-op context instead of a workspace context.
    for workspace_name in _resolve_gc_workspaces(
        backend_store=backend_store,
        all_workspaces=all_workspaces,
        workspace=workspace,
        backend_store_uri=backend_store_uri,
    ):
        workspace_ctx = (
            workspace_context.WorkspaceContext(workspace_name)
            if workspace_name
            else contextlib.nullcontext()
        )
        with workspace_ctx:
            _gc_tracking_resources(
                backend_store=backend_store,
                run_ids=run_ids_list,
                experiment_ids=experiment_ids_list,
                logged_model_ids=logged_model_ids_list,
                older_than=older_than,
                time_delta=time_delta,
                skip_experiments=skip_experiments,
                skip_logged_models=skip_logged_models,
                ignore_not_found=all_workspaces,
            )

            # Clean up jobs within the same workspace context
            if job_store is not None:
                deleted_job_ids = job_store.delete_jobs(older_than=time_delta, job_ids=job_ids_list)
                for job_id in deleted_job_ids:
                    click.echo(f"Job with ID {job_id} has been permanently deleted.")
1271  
1272  
@cli.command(short_help="Prints out useful information for debugging issues with MLflow.")
@click.option(
    "--mask-envs",
    is_flag=True,
    help=(
        "If set (the default behavior without setting this flag is not to obfuscate information), "
        'mask the MLflow environment variable values (e.g. `"MLFLOW_ENV_VAR": "***"`) '
        "in the output to prevent leaking sensitive information."
    ),
)
def doctor(mask_envs):
    # Thin CLI wrapper: delegates directly to mlflow.doctor, forwarding the
    # --mask-envs flag that controls redaction of MLflow env-var values.
    # No docstring on purpose: click would surface it as extra help text.
    mlflow.doctor(mask_envs)
1285  
1286  
# Core command groups that are always available.
cli.add_command(mlflow.deployments.cli.commands)
cli.add_command(mlflow.experiments.commands)
cli.add_command(mlflow.store.artifact.cli.commands)
cli.add_command(mlflow.runs.commands)
cli.add_command(mlflow.db.commands)

from mlflow.store.fs2db.cli import migrate_filestore

cli.add_command(migrate_filestore)

# Add traces CLI commands
from mlflow.cli import traces

cli.add_command(traces.commands)

# Add scorers CLI commands
from mlflow.cli import scorers

cli.add_command(scorers.commands)

# Add datasets CLI commands
from mlflow.cli import datasets

cli.add_command(datasets.commands)

# Add demo CLI command
from mlflow.cli.demo import demo

cli.add_command(demo)

# Add AI commands CLI
cli.add_command(ai_commands.commands)

# The command groups below are optional: each is registered only when its module
# imports cleanly. `contextlib.suppress(ImportError)` is used uniformly here
# (instead of a mix with try/except ImportError blocks) for consistency.

# Add MCP CLI commands
with contextlib.suppress(ImportError):
    from mlflow.mcp.cli import cli as mcp_cli

    cli.add_command(mcp_cli)

# Add Claude Code integration commands
with contextlib.suppress(ImportError):
    import mlflow.claude_code.cli

    cli.add_command(mlflow.claude_code.cli.commands)

# Add Assistant CLI commands
with contextlib.suppress(ImportError):
    import mlflow.assistant.cli

    cli.add_command(mlflow.assistant.cli.commands)

# We are conditionally loading these commands since the skinny client does
# not support them due to the pandas and numpy dependencies of MLflow Models
with contextlib.suppress(ImportError):
    import mlflow.models.cli

    cli.add_command(mlflow.models.cli.commands)

with contextlib.suppress(ImportError):
    import mlflow.sagemaker.cli

    cli.add_command(mlflow.sagemaker.cli.commands)

with contextlib.suppress(ImportError):
    import mlflow.gateway.cli

    cli.add_command(mlflow.gateway.cli.commands)

# Add crypto CLI commands
with contextlib.suppress(ImportError):
    from mlflow.cli import crypto

    cli.add_command(crypto.commands)

if __name__ == "__main__":
    cli()