diff --git a/hermes_cli/codex_models.py b/hermes_cli/codex_models.py index 43722124a..9fe346714 100644 --- a/hermes_cli/codex_models.py +++ b/hermes_cli/codex_models.py @@ -13,7 +13,6 @@ logger = logging.getLogger(__name__) DEFAULT_CODEX_MODELS: List[str] = [ "gpt-5.3-codex", - "gpt-5.4", "gpt-5.2-codex", "gpt-5.1-codex-max", "gpt-5.1-codex-mini", diff --git a/hermes_cli/main.py b/hermes_cli/main.py index 14706f23b..52a2b98b6 100644 --- a/hermes_cli/main.py +++ b/hermes_cli/main.py @@ -1057,7 +1057,12 @@ def _model_flow_openai_codex(config, current_model=""): _codex_token = _codex_creds.get("api_key") except Exception: pass + codex_models = get_codex_model_ids(access_token=_codex_token) + if "gpt-5.4" not in codex_models: + print("Note: `gpt-5.4` is not currently supported for ChatGPT/Codex OAuth accounts.") + print("Use OpenRouter if you need GPT-5.4 specifically.") + print() selected = _prompt_model_selection(codex_models, current_model=current_model) if selected: @@ -1072,6 +1077,7 @@ def _model_flow_openai_codex(config, current_model=""): print("No change.") + def _model_flow_custom(config): """Custom endpoint: collect URL, API key, and model name. 
diff --git a/hermes_cli/models.py b/hermes_cli/models.py index 85c248c1b..d2d1bf463 100644 --- a/hermes_cli/models.py +++ b/hermes_cli/models.py @@ -41,7 +41,6 @@ _PROVIDER_MODELS: dict[str, list[str]] = { ], "openai-codex": [ "gpt-5.3-codex", - "gpt-5.4", "gpt-5.2-codex", "gpt-5.1-codex-mini", "gpt-5.1-codex-max", diff --git a/hermes_cli/setup.py b/hermes_cli/setup.py index 789f2b096..3e9ebee62 100644 --- a/hermes_cli/setup.py +++ b/hermes_cli/setup.py @@ -1275,6 +1275,11 @@ def setup_model_provider(config: dict): logger.debug("Could not resolve Codex runtime credentials for model list: %s", exc) codex_models = get_codex_model_ids(access_token=codex_token) + if "gpt-5.4" not in codex_models: + print_warning("`gpt-5.4` is not currently supported for ChatGPT/Codex OAuth accounts.") + print_info("Use OpenRouter if you need GPT-5.4 specifically.") + print() + model_choices = codex_models + [f"Keep current ({current_model})"] default_codex = 0 if current_model in codex_models: diff --git a/tests/hermes_cli/test_setup.py b/tests/hermes_cli/test_setup.py index 7e2443abb..12f709999 100644 --- a/tests/hermes_cli/test_setup.py +++ b/tests/hermes_cli/test_setup.py @@ -97,7 +97,7 @@ def test_custom_setup_clears_active_oauth_provider(tmp_path, monkeypatch): assert reloaded["model"]["default"] == "custom/model" -def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, monkeypatch): +def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, monkeypatch, capsys): monkeypatch.setenv("HERMES_HOME", str(tmp_path)) monkeypatch.setenv("OPENROUTER_API_KEY", "or-test-key") _clear_provider_env(monkeypatch) @@ -125,7 +125,7 @@ def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, mon def _fake_get_codex_model_ids(access_token=None): captured["access_token"] = access_token - return ["gpt-5.4", "gpt-5.3-codex"] + return ["gpt-5.2-codex", "gpt-5.2"] monkeypatch.setattr( "hermes_cli.codex_models.get_codex_model_ids", @@ -136,9 
+136,11 @@ def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, mon save_config(config) reloaded = load_config() + output = capsys.readouterr().out assert captured["access_token"] == "codex-access-token" + assert "not currently supported for ChatGPT/Codex OAuth accounts" in output assert isinstance(reloaded["model"], dict) assert reloaded["model"]["provider"] == "openai-codex" - assert reloaded["model"]["default"] == "gpt-5.4" + assert reloaded["model"]["default"] == "gpt-5.2-codex" assert reloaded["model"]["base_url"] == "https://chatgpt.com/backend-api/codex" diff --git a/tests/test_codex_models.py b/tests/test_codex_models.py index 5e85e46ad..85ed6faad 100644 --- a/tests/test_codex_models.py +++ b/tests/test_codex_models.py @@ -54,6 +54,47 @@ def test_get_codex_model_ids_falls_back_to_curated_defaults(tmp_path, monkeypatc assert models[: len(DEFAULT_CODEX_MODELS)] == DEFAULT_CODEX_MODELS +def test_model_command_warns_when_gpt_5_4_is_unavailable_for_codex(monkeypatch, capsys): + from hermes_cli.main import _model_flow_openai_codex + + captured = {} + + monkeypatch.setattr( + "hermes_cli.auth.get_codex_auth_status", + lambda: {"logged_in": True}, + ) + monkeypatch.setattr( + "hermes_cli.auth.resolve_codex_runtime_credentials", + lambda *args, **kwargs: {"api_key": "codex-access-token"}, + ) + + def _fake_get_codex_model_ids(access_token=None): + captured["access_token"] = access_token + return ["gpt-5.2-codex", "gpt-5.2"] + + def _fake_prompt_model_selection(model_ids, current_model=""): + captured["model_ids"] = list(model_ids) + captured["current_model"] = current_model + return None + + monkeypatch.setattr( + "hermes_cli.codex_models.get_codex_model_ids", + _fake_get_codex_model_ids, + ) + monkeypatch.setattr( + "hermes_cli.main._prompt_model_selection", + _fake_prompt_model_selection, + ) + + _model_flow_openai_codex({}, current_model="openai/gpt-5.4") + output = capsys.readouterr().out + + assert captured["access_token"] == 
"codex-access-token" + assert captured["model_ids"] == ["gpt-5.2-codex", "gpt-5.2"] + assert "not currently supported for ChatGPT/Codex OAuth accounts" in output + assert "Use OpenRouter if you need GPT-5.4 specifically." in output + + # ── Tests for _normalize_model_for_provider ──────────────────────────