fix: explain codex oauth gpt-5.4 limits

This commit is contained in:
teknium1
2026-03-13 21:12:55 -07:00
parent 57e98fe6c9
commit 529729831c
6 changed files with 57 additions and 5 deletions

View File

@@ -13,7 +13,6 @@ logger = logging.getLogger(__name__)
DEFAULT_CODEX_MODELS: List[str] = [
"gpt-5.3-codex",
"gpt-5.4",
"gpt-5.2-codex",
"gpt-5.1-codex-max",
"gpt-5.1-codex-mini",

View File

@@ -1057,7 +1057,12 @@ def _model_flow_openai_codex(config, current_model=""):
_codex_token = _codex_creds.get("api_key")
except Exception:
pass
codex_models = get_codex_model_ids(access_token=_codex_token)
if "gpt-5.4" not in codex_models:
print("Note: `gpt-5.4` is not currently supported for ChatGPT/Codex OAuth accounts.")
print("Use OpenRouter if you need GPT-5.4 specifically.")
print()
selected = _prompt_model_selection(codex_models, current_model=current_model)
if selected:
@@ -1072,6 +1077,7 @@ def _model_flow_openai_codex(config, current_model=""):
print("No change.")
def _model_flow_custom(config):
"""Custom endpoint: collect URL, API key, and model name.

View File

@@ -41,7 +41,6 @@ _PROVIDER_MODELS: dict[str, list[str]] = {
],
"openai-codex": [
"gpt-5.3-codex",
"gpt-5.4",
"gpt-5.2-codex",
"gpt-5.1-codex-mini",
"gpt-5.1-codex-max",

View File

@@ -1275,6 +1275,11 @@ def setup_model_provider(config: dict):
logger.debug("Could not resolve Codex runtime credentials for model list: %s", exc)
codex_models = get_codex_model_ids(access_token=codex_token)
if "gpt-5.4" not in codex_models:
print_warning("`gpt-5.4` is not currently supported for ChatGPT/Codex OAuth accounts.")
print_info("Use OpenRouter if you need GPT-5.4 specifically.")
print()
model_choices = codex_models + [f"Keep current ({current_model})"]
default_codex = 0
if current_model in codex_models:

View File

@@ -97,7 +97,7 @@ def test_custom_setup_clears_active_oauth_provider(tmp_path, monkeypatch):
assert reloaded["model"]["default"] == "custom/model"
def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, monkeypatch):
def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, monkeypatch, capsys):
monkeypatch.setenv("HERMES_HOME", str(tmp_path))
monkeypatch.setenv("OPENROUTER_API_KEY", "or-test-key")
_clear_provider_env(monkeypatch)
@@ -125,7 +125,7 @@ def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, mon
def _fake_get_codex_model_ids(access_token=None):
captured["access_token"] = access_token
return ["gpt-5.4", "gpt-5.3-codex"]
return ["gpt-5.2-codex", "gpt-5.2"]
monkeypatch.setattr(
"hermes_cli.codex_models.get_codex_model_ids",
@@ -136,9 +136,11 @@ def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, mon
save_config(config)
reloaded = load_config()
output = capsys.readouterr().out
assert captured["access_token"] == "codex-access-token"
assert "not currently supported for ChatGPT/Codex OAuth accounts" in output
assert isinstance(reloaded["model"], dict)
assert reloaded["model"]["provider"] == "openai-codex"
assert reloaded["model"]["default"] == "gpt-5.4"
assert reloaded["model"]["default"] == "gpt-5.2-codex"
assert reloaded["model"]["base_url"] == "https://chatgpt.com/backend-api/codex"

View File

@@ -54,6 +54,47 @@ def test_get_codex_model_ids_falls_back_to_curated_defaults(tmp_path, monkeypatc
assert models[: len(DEFAULT_CODEX_MODELS)] == DEFAULT_CODEX_MODELS
def test_model_command_warns_when_gpt_5_4_is_unavailable_for_codex(monkeypatch, capsys):
    """The Codex model flow should warn when `gpt-5.4` is absent from the live list.

    Stubs out auth status, runtime credentials, the live model-id fetch, and the
    interactive prompt, then checks that:
      * the resolved OAuth access token is forwarded to the model-list call,
      * the prompt receives exactly the models the (fake) API returned, and
      * the GPT-5.4-unsupported warning (with the OpenRouter suggestion) is printed.
    """
    from hermes_cli.main import _model_flow_openai_codex

    recorded = {}

    # Fake a logged-in Codex session with a resolvable access token.
    monkeypatch.setattr(
        "hermes_cli.auth.get_codex_auth_status",
        lambda: {"logged_in": True},
    )
    monkeypatch.setattr(
        "hermes_cli.auth.resolve_codex_runtime_credentials",
        lambda *args, **kwargs: {"api_key": "codex-access-token"},
    )

    # Live model list deliberately excludes gpt-5.4 to trigger the warning.
    def _fake_get_codex_model_ids(access_token=None):
        recorded["access_token"] = access_token
        return ["gpt-5.2-codex", "gpt-5.2"]

    monkeypatch.setattr(
        "hermes_cli.codex_models.get_codex_model_ids",
        _fake_get_codex_model_ids,
    )

    # Capture what the selection prompt is shown; return None (user keeps current).
    def _fake_prompt_model_selection(model_ids, current_model=""):
        recorded["model_ids"] = list(model_ids)
        recorded["current_model"] = current_model
        return None

    monkeypatch.setattr(
        "hermes_cli.auth._prompt_model_selection",
        _fake_prompt_model_selection,
    )

    _model_flow_openai_codex({}, current_model="openai/gpt-5.4")

    output = capsys.readouterr().out
    assert recorded["access_token"] == "codex-access-token"
    assert recorded["model_ids"] == ["gpt-5.2-codex", "gpt-5.2"]
    assert "not currently supported for ChatGPT/Codex OAuth accounts" in output
    assert "Use OpenRouter if you need GPT-5.4 specifically." in output
# ── Tests for _normalize_model_for_provider ──────────────────────────