hermes_cli/codex_models.py
"""Codex model discovery from API, local cache, and config."""

from __future__ import annotations

import json
import logging
import os
from pathlib import Path
from typing import List, Optional

logger = logging.getLogger(__name__)

DEFAULT_CODEX_MODELS: List[str] = [
    "gpt-5.5",
    "gpt-5.4-mini",
    "gpt-5.4",
    "gpt-5.3-codex",
    "gpt-5.2-codex",
    "gpt-5.1-codex-max",
    "gpt-5.1-codex-mini",
]
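# Note: the defaults above are only a last-resort fallback. get_codex_model_ids()
# consults the live API, the config.toml default, and the local cache first, and
# appends these entries only for slugs not already discovered.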

_FORWARD_COMPAT_TEMPLATE_MODELS: List[tuple[str, tuple[str, ...]]] = [
    ("gpt-5.5", ("gpt-5.4", "gpt-5.4-mini", "gpt-5.3-codex")),
    ("gpt-5.4-mini", ("gpt-5.3-codex", "gpt-5.2-codex")),
    ("gpt-5.4", ("gpt-5.3-codex", "gpt-5.2-codex")),
    ("gpt-5.3-codex", ("gpt-5.2-codex",)),
]


def _add_forward_compat_models(model_ids: List[str]) -> List[str]:
    """Add Clawdbot-style synthetic forward-compat Codex models.

    If a newer Codex slug isn't returned by live discovery, surface it when an
    older compatible template model is present. This mirrors Clawdbot's
    synthetic catalog / forward-compat behavior for GPT-5 Codex variants.
    """
    ordered: List[str] = []
    seen: set[str] = set()
    for model_id in model_ids:
        if model_id not in seen:
            ordered.append(model_id)
            seen.add(model_id)

    for synthetic_model, template_models in _FORWARD_COMPAT_TEMPLATE_MODELS:
        if synthetic_model in seen:
            continue
        if any(template in seen for template in template_models):
            ordered.append(synthetic_model)
            seen.add(synthetic_model)

    return ordered
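# Worked example: a catalog that only knows "gpt-5.2-codex" yields
#     _add_forward_compat_models(["gpt-5.2-codex"])
#     -> ["gpt-5.2-codex", "gpt-5.4-mini", "gpt-5.4", "gpt-5.3-codex"]
# "gpt-5.5" is *not* surfaced here because the template table is walked in a
# single pass: its templates ("gpt-5.4", "gpt-5.4-mini", "gpt-5.3-codex") only
# become "seen" after the "gpt-5.5" entry has already been skipped.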


def _fetch_models_from_api(access_token: str) -> List[str]:
    """Fetch available models from the Codex API.

    Returns visible, API-supported models sorted by priority, with synthetic
    forward-compat slugs appended. Returns an empty list on any failure.
    """
    try:
        import httpx

        resp = httpx.get(
            "https://chatgpt.com/backend-api/codex/models?client_version=1.0.0",
            headers={"Authorization": f"Bearer {access_token}"},
            timeout=10,
        )
        if resp.status_code != 200:
            return []
        data = resp.json()
        entries = data.get("models", []) if isinstance(data, dict) else []
    except Exception as exc:
        logger.debug("Failed to fetch Codex models from API: %s", exc)
        return []

    sortable = []
    for item in entries:
        if not isinstance(item, dict):
            continue
        slug = item.get("slug")
        if not isinstance(slug, str) or not slug.strip():
            continue
        slug = slug.strip()
        if item.get("supported_in_api") is False:
            continue
        visibility = item.get("visibility", "")
        if isinstance(visibility, str) and visibility.strip().lower() in ("hide", "hidden"):
            continue
        priority = item.get("priority")
        rank = int(priority) if isinstance(priority, (int, float)) else 10_000
        sortable.append((rank, slug))

    sortable.sort(key=lambda x: (x[0], x[1]))
    return _add_forward_compat_models([slug for _, slug in sortable])
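# The parsing above implies a response payload shaped roughly like the sketch
# below (illustrative only, inferred from the fields that are read; the real
# endpoint may carry additional keys):
#
#     {
#       "models": [
#         {"slug": "gpt-5.3-codex", "priority": 1,
#          "visibility": "show", "supported_in_api": true},
#         ...
#       ]
#     }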


def _read_default_model(codex_home: Path) -> Optional[str]:
    config_path = codex_home / "config.toml"
    if not config_path.exists():
        return None
    try:
        import tomllib  # stdlib TOML parser, available on Python 3.11+
    except Exception:
        return None
    try:
        payload = tomllib.loads(config_path.read_text(encoding="utf-8"))
    except Exception:
        return None
    model = payload.get("model") if isinstance(payload, dict) else None
    if isinstance(model, str) and model.strip():
        return model.strip()
    return None
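# _read_default_model() only looks at the top-level "model" key, so a minimal
# $CODEX_HOME/config.toml (by default ~/.codex/config.toml) that satisfies it
# is simply (illustrative):
#
#     model = "gpt-5.3-codex"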


def _read_cache_models(codex_home: Path) -> List[str]:
    cache_path = codex_home / "models_cache.json"
    if not cache_path.exists():
        return []
    try:
        raw = json.loads(cache_path.read_text(encoding="utf-8"))
    except Exception:
        return []

    entries = raw.get("models") if isinstance(raw, dict) else None
    sortable = []
    if isinstance(entries, list):
        for item in entries:
            if not isinstance(item, dict):
                continue
            slug = item.get("slug")
            if not isinstance(slug, str) or not slug.strip():
                continue
            slug = slug.strip()
            if item.get("supported_in_api") is False:
                continue
            visibility = item.get("visibility")
            if isinstance(visibility, str) and visibility.strip().lower() in ("hide", "hidden"):
                continue
            priority = item.get("priority")
            rank = int(priority) if isinstance(priority, (int, float)) else 10_000
            sortable.append((rank, slug))

    sortable.sort(key=lambda item: (item[0], item[1]))
    deduped: List[str] = []
    for _, slug in sortable:
        if slug not in deduped:
            deduped.append(slug)
    return deduped
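# models_cache.json is assumed to mirror the API payload: a top-level "models"
# list whose entries carry "slug", "priority", "visibility" and
# "supported_in_api". Entries are filtered and sorted by (priority, slug) as in
# _fetch_models_from_api, then deduplicated locally. Sketch of the assumed file
# layout (illustrative, inferred from the keys read above):
#
#     {"models": [{"slug": "gpt-5.2-codex", "priority": 2}]}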


def get_codex_model_ids(access_token: Optional[str] = None) -> List[str]:
    """Return available Codex model IDs, trying the API first, then local sources.

    Resolution order: API (live, if a token is provided) > config.toml default >
    local cache > hardcoded defaults.
    """
    codex_home_str = os.getenv("CODEX_HOME", "").strip() or str(Path.home() / ".codex")
    codex_home = Path(codex_home_str).expanduser()
    ordered: List[str] = []

    # Try the live API if we have a token.
    if access_token:
        api_models = _fetch_models_from_api(access_token)
        if api_models:
            # Forward-compat slugs are already appended by _fetch_models_from_api.
            return api_models

    # Fall back to local sources.
    default_model = _read_default_model(codex_home)
    if default_model:
        ordered.append(default_model)

    for model_id in _read_cache_models(codex_home):
        if model_id not in ordered:
            ordered.append(model_id)

    for model_id in DEFAULT_CODEX_MODELS:
        if model_id not in ordered:
            ordered.append(model_id)

    return _add_forward_compat_models(ordered)
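

if __name__ == "__main__":
    # Minimal smoke check (illustrative): without an access token this only
    # exercises the config.toml / cache / hardcoded-default fallback path.
    for model_id in get_codex_model_ids():
        print(model_id)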