refactor: drop codex oauth model warning

This commit is contained in:
teknium1
2026-03-13 21:18:29 -07:00
parent 529729831c
commit 899cb52e7a
4 changed files with 3 additions and 15 deletions

View File

@@ -1059,10 +1059,6 @@ def _model_flow_openai_codex(config, current_model=""):
     pass
 codex_models = get_codex_model_ids(access_token=_codex_token)
-if "gpt-5.4" not in codex_models:
-    print("Note: `gpt-5.4` is not currently supported for ChatGPT/Codex OAuth accounts.")
-    print("Use OpenRouter if you need GPT-5.4 specifically.")
-    print()
 selected = _prompt_model_selection(codex_models, current_model=current_model)
 if selected:

View File

@@ -1275,10 +1275,6 @@ def setup_model_provider(config: dict):
 logger.debug("Could not resolve Codex runtime credentials for model list: %s", exc)
 codex_models = get_codex_model_ids(access_token=codex_token)
-if "gpt-5.4" not in codex_models:
-    print_warning("`gpt-5.4` is not currently supported for ChatGPT/Codex OAuth accounts.")
-    print_info("Use OpenRouter if you need GPT-5.4 specifically.")
-    print()
 model_choices = codex_models + [f"Keep current ({current_model})"]
 default_codex = 0

View File

@@ -97,7 +97,7 @@ def test_custom_setup_clears_active_oauth_provider(tmp_path, monkeypatch):
 assert reloaded["model"]["default"] == "custom/model"
-def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, monkeypatch, capsys):
+def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, monkeypatch):
 monkeypatch.setenv("HERMES_HOME", str(tmp_path))
 monkeypatch.setenv("OPENROUTER_API_KEY", "or-test-key")
 _clear_provider_env(monkeypatch)
@@ -136,10 +136,8 @@ def test_codex_setup_uses_runtime_access_token_for_live_model_list(tmp_path, mon
 save_config(config)
 reloaded = load_config()
-output = capsys.readouterr().out
 assert captured["access_token"] == "codex-access-token"
-assert "not currently supported for ChatGPT/Codex OAuth accounts" in output
 assert isinstance(reloaded["model"], dict)
 assert reloaded["model"]["provider"] == "openai-codex"
 assert reloaded["model"]["default"] == "gpt-5.2-codex"

View File

@@ -54,7 +54,7 @@ def test_get_codex_model_ids_falls_back_to_curated_defaults(tmp_path, monkeypatc
 assert models[: len(DEFAULT_CODEX_MODELS)] == DEFAULT_CODEX_MODELS
-def test_model_command_warns_when_gpt_5_4_is_unavailable_for_codex(monkeypatch, capsys):
+def test_model_command_uses_runtime_access_token_for_codex_list(monkeypatch):
 from hermes_cli.main import _model_flow_openai_codex
 captured = {}
@@ -87,12 +87,10 @@ def test_model_command_warns_when_gpt_5_4_is_unavailable_for_codex(monkeypatch,
 )
 _model_flow_openai_codex({}, current_model="openai/gpt-5.4")
-output = capsys.readouterr().out
 assert captured["access_token"] == "codex-access-token"
 assert captured["model_ids"] == ["gpt-5.2-codex", "gpt-5.2"]
-assert "not currently supported for ChatGPT/Codex OAuth accounts" in output
-assert "Use OpenRouter if you need GPT-5.4 specifically." in output
 assert captured["current_model"] == "openai/gpt-5.4"
 # ── Tests for _normalize_model_for_provider ──────────────────────────