# test_openai_codex_model_validation_fallback.py
"""Regression tests for OpenAI Codex model validation when the listing lags
behind actually usable backend model IDs.

The bug: `/model` and `switch_model()` reject `gpt-5.3-codex-spark` because the
OpenAI Codex listing omits it, even though direct runtime calls with
`--provider openai-codex -m gpt-5.3-codex-spark` succeed.
"""

from unittest.mock import patch

from hermes_cli.model_switch import switch_model
from hermes_cli.models import validate_requested_model

# Stale provider listing snapshot: `gpt-5.3-codex-spark` is deliberately absent
# even though the backend accepts it.
_STALE_CODEX_LISTING = ("gpt-5.5", "gpt-5.4", "gpt-5.3-codex")


def _patched_stale_listing():
    """Return a patch context manager that pins the Codex listing to the
    stale snapshot, simulating a listing that lags behind the backend.
    """
    return patch(
        "hermes_cli.models.provider_model_ids",
        return_value=list(_STALE_CODEX_LISTING),
    )


def test_openai_codex_unknown_but_plausible_model_is_accepted_with_warning():
    """If the Codex listing is incomplete, `/model` should soft-accept the model
    with a warning instead of hard-rejecting it.
    """
    with _patched_stale_listing():
        outcome = validate_requested_model("gpt-5.3-codex-spark", "openai-codex")

    # Soft-accept: the choice is persisted but explicitly flagged as
    # unrecognized so the caller can warn rather than fail.
    assert outcome["accepted"] is True
    assert outcome["persist"] is True
    assert outcome["recognized"] is False
    # The warning message must name the requested model, blame the stale
    # listing, and suggest near-miss model IDs.
    assert "gpt-5.3-codex-spark" in outcome["message"]
    assert "OpenAI Codex model listing" in outcome["message"]
    assert "Similar models" in outcome["message"]
    assert "gpt-5.3-codex" in outcome["message"]


def test_switch_model_allows_openai_codex_model_missing_from_listing():
    """switch_model() should succeed for Codex models that the runtime accepts
    even when the listing has not caught up yet.
    """
    with _patched_stale_listing():
        outcome = switch_model(
            "gpt-5.3-codex-spark",
            current_provider="openai-codex",
            current_model="gpt-5.4",
            current_base_url="",
            current_api_key="",
            user_providers=None,
        )

    # The switch itself succeeds and targets the requested provider/model...
    assert outcome.success is True
    assert outcome.new_model == "gpt-5.3-codex-spark"
    assert outcome.target_provider == "openai-codex"
    # ...but the listing gap must still surface to the user as a warning.
    assert outcome.warning_message
    assert "OpenAI Codex model listing" in outcome.warning_message