# tests/agent/test_auxiliary_client_anthropic_custom.py
  1  """Tests for agent.auxiliary_client._try_custom_endpoint's anthropic_messages branch.
  2  
  3  When a user configures a custom endpoint with ``api_mode: anthropic_messages``
  4  (e.g. MiniMax, Zhipu GLM, LiteLLM in Anthropic-proxy mode), auxiliary tasks
  5  (compression, web_extract, session_search, title generation) must use the
  6  native Anthropic transport rather than being silently downgraded to an
  7  OpenAI-wire client that speaks the wrong protocol.
  8  """
  9  
 10  from __future__ import annotations
 11  
 12  from unittest.mock import MagicMock, patch
 13  
 14  import pytest
 15  
 16  
 17  @pytest.fixture(autouse=True)
 18  def _clean_env(monkeypatch):
 19      for key in (
 20          "OPENAI_API_KEY", "OPENAI_BASE_URL",
 21          "ANTHROPIC_API_KEY", "ANTHROPIC_TOKEN",
 22      ):
 23          monkeypatch.delenv(key, raising=False)
 24  
 25  
 26  def _install_anthropic_adapter_mocks():
 27      """Patch build_anthropic_client so the test doesn't need the SDK."""
 28      fake_client = MagicMock(name="anthropic_client")
 29      return patch(
 30          "agent.anthropic_adapter.build_anthropic_client",
 31          return_value=fake_client,
 32      ), fake_client
 33  
 34  
 35  def test_custom_endpoint_anthropic_messages_builds_anthropic_wrapper():
 36      """api_mode=anthropic_messages → returns AnthropicAuxiliaryClient, not OpenAI."""
 37      from agent.auxiliary_client import _try_custom_endpoint, AnthropicAuxiliaryClient
 38  
 39      with patch(
 40          "agent.auxiliary_client._resolve_custom_runtime",
 41          return_value=(
 42              "https://api.minimax.io/anthropic",
 43              "minimax-key",
 44              "anthropic_messages",
 45          ),
 46      ), patch(
 47          "agent.auxiliary_client._read_main_model",
 48          return_value="claude-sonnet-4-6",
 49      ):
 50          adapter_patch, fake_client = _install_anthropic_adapter_mocks()
 51          with adapter_patch:
 52              client, model = _try_custom_endpoint()
 53  
 54      assert isinstance(client, AnthropicAuxiliaryClient), (
 55          "Custom endpoint with api_mode=anthropic_messages must return the "
 56          f"native Anthropic wrapper, got {type(client).__name__}"
 57      )
 58      assert model == "claude-sonnet-4-6"
 59      # Wrapper should NOT be marked as OAuth — third-party endpoints are
 60      # always API-key authenticated.
 61      assert client.api_key == "minimax-key"
 62      assert client.base_url == "https://api.minimax.io/anthropic"
 63  
 64  
 65  def test_custom_endpoint_anthropic_messages_falls_back_when_sdk_missing():
 66      """Graceful degradation when anthropic SDK is unavailable."""
 67      from agent.auxiliary_client import _try_custom_endpoint
 68  
 69      import_error = ImportError("anthropic package not installed")
 70  
 71      with patch(
 72          "agent.auxiliary_client._resolve_custom_runtime",
 73          return_value=("https://api.minimax.io/anthropic", "k", "anthropic_messages"),
 74      ), patch(
 75          "agent.auxiliary_client._read_main_model",
 76          return_value="claude-sonnet-4-6",
 77      ), patch(
 78          "agent.anthropic_adapter.build_anthropic_client",
 79          side_effect=import_error,
 80      ):
 81          client, model = _try_custom_endpoint()
 82  
 83      # Should fall back to an OpenAI-wire client rather than returning
 84      # (None, None) — the tool still needs to do *something*.
 85      assert client is not None
 86      assert model == "claude-sonnet-4-6"
 87      # OpenAI client, not AnthropicAuxiliaryClient.
 88      from agent.auxiliary_client import AnthropicAuxiliaryClient
 89      assert not isinstance(client, AnthropicAuxiliaryClient)
 90  
 91  
 92  def test_custom_endpoint_chat_completions_still_uses_openai_wire():
 93      """Regression: default path (no api_mode) must remain OpenAI client."""
 94      from agent.auxiliary_client import _try_custom_endpoint, AnthropicAuxiliaryClient
 95  
 96      with patch(
 97          "agent.auxiliary_client._resolve_custom_runtime",
 98          return_value=("https://api.example.com/v1", "key", None),
 99      ), patch(
100          "agent.auxiliary_client._read_main_model",
101          return_value="my-model",
102      ):
103          client, model = _try_custom_endpoint()
104  
105      assert client is not None
106      assert model == "my-model"
107      assert not isinstance(client, AnthropicAuxiliaryClient)