Merge pull request #1121 from 0xbyt4/fix/anthropic-adapter-issues

fix: anthropic adapter — max_tokens, fallback crash, proxy base_url
This commit is contained in:
Teknium
2026-03-12 19:07:06 -07:00
committed by GitHub
2 changed files with 162 additions and 10 deletions

View File

@@ -451,7 +451,7 @@ class AIAgent:
from agent.anthropic_adapter import resolve_anthropic_token
effective_key = resolve_anthropic_token() or ""
self._anthropic_api_key = effective_key
self._anthropic_client = build_anthropic_client(effective_key, base_url if base_url and "anthropic" in base_url else None)
self._anthropic_client = build_anthropic_client(effective_key, base_url)
# No OpenAI client needed for Anthropic mode
self.client = None
self._client_kwargs = {}
@@ -2598,14 +2598,10 @@ class AIAgent:
fb_api_mode = "chat_completions"
if fb_provider == "openai-codex":
fb_api_mode = "codex_responses"
elif fb_provider == "anthropic":
fb_api_mode = "anthropic_messages"
fb_base_url = str(fb_client.base_url)
# Swap client and config in-place
self.client = fb_client
self._client_kwargs = {
"api_key": fb_client.api_key,
"base_url": fb_base_url,
}
old_model = self.model
self.model = fb_model
self.provider = fb_provider
@@ -2613,10 +2609,27 @@ class AIAgent:
self.api_mode = fb_api_mode
self._fallback_activated = True
if fb_api_mode == "anthropic_messages":
# Build native Anthropic client instead of using OpenAI client
from agent.anthropic_adapter import build_anthropic_client, resolve_anthropic_token
effective_key = fb_client.api_key or resolve_anthropic_token() or ""
self._anthropic_api_key = effective_key
self._anthropic_client = build_anthropic_client(effective_key)
self.client = None
self._client_kwargs = {}
else:
# Swap OpenAI client and config in-place
self.client = fb_client
self._client_kwargs = {
"api_key": fb_client.api_key,
"base_url": fb_base_url,
}
# Re-evaluate prompt caching for the new provider/model
is_native_anthropic = fb_api_mode == "anthropic_messages"
self._use_prompt_caching = (
"openrouter" in fb_base_url.lower()
and "claude" in fb_model.lower()
("openrouter" in fb_base_url.lower() and "claude" in fb_model.lower())
or is_native_anthropic
)
print(
@@ -2642,7 +2655,7 @@ class AIAgent:
model=self.model,
messages=api_messages,
tools=self.tools,
max_tokens=None,
max_tokens=self.max_tokens,
reasoning_config=self.reasoning_config,
)

View File

@@ -1564,3 +1564,142 @@ class TestSafeWriter:
# Still just one layer
wrapped.write("test")
assert inner.getvalue() == "test"
# ===================================================================
# Anthropic adapter integration fixes
# ===================================================================
class TestBuildApiKwargsAnthropicMaxTokens:
    """Bug fix: max_tokens was always None for Anthropic mode, ignoring user config."""

    # Parameter order used when build_anthropic_kwargs is invoked positionally.
    _PARAM_NAMES = ("model", "messages", "tools", "max_tokens", "reasoning_config")

    def test_max_tokens_passed_to_anthropic(self, agent):
        # A user-configured limit must reach the adapter unchanged.
        agent.api_mode = "anthropic_messages"
        agent.max_tokens = 4096
        agent.reasoning_config = None
        with patch("agent.anthropic_adapter.build_anthropic_kwargs") as mock_build:
            mock_build.return_value = {"model": "claude-sonnet-4-20250514", "messages": [], "max_tokens": 4096}
            agent._build_api_kwargs([{"role": "user", "content": "test"}])
            positional, keyword = mock_build.call_args
            if not keyword:
                # Call was made positionally; map args onto parameter names.
                keyword = dict(zip(self._PARAM_NAMES, positional))
            assert (
                keyword.get("max_tokens") == 4096
                or mock_build.call_args[1].get("max_tokens") == 4096
            )

    def test_max_tokens_none_when_unset(self, agent):
        # With no configured limit, None must be forwarded so the adapter
        # can apply its own default.
        agent.api_mode = "anthropic_messages"
        agent.max_tokens = None
        agent.reasoning_config = None
        with patch("agent.anthropic_adapter.build_anthropic_kwargs") as mock_build:
            mock_build.return_value = {"model": "claude-sonnet-4-20250514", "messages": [], "max_tokens": 16384}
            agent._build_api_kwargs([{"role": "user", "content": "test"}])
            recorded = mock_build.call_args
            # max_tokens should be None (let adapter use its default)
            if recorded[1]:
                assert recorded[1].get("max_tokens") is None
            else:
                assert recorded[0][3] is None
class TestFallbackAnthropicProvider:
    """Bug fix: _try_activate_fallback had no case for anthropic provider."""

    @staticmethod
    def _stub_provider_client(base_url, api_key):
        # Stand-in for the client object resolve_provider_client returns.
        stub = MagicMock()
        stub.base_url = base_url
        stub.api_key = api_key
        return stub

    def test_fallback_to_anthropic_sets_api_mode(self, agent):
        agent._fallback_activated = False
        agent._fallback_model = {"provider": "anthropic", "model": "claude-sonnet-4-20250514"}
        stub = self._stub_provider_client("https://api.anthropic.com/v1", "sk-ant-api03-test")
        with (
            patch("agent.auxiliary_client.resolve_provider_client", return_value=(stub, None)),
            patch("agent.anthropic_adapter.build_anthropic_client", return_value=MagicMock()),
            patch("agent.anthropic_adapter.resolve_anthropic_token", return_value=None),
        ):
            activated = agent._try_activate_fallback()
        # Native Anthropic mode: dedicated client set, OpenAI client cleared.
        assert activated is True
        assert agent.api_mode == "anthropic_messages"
        assert agent._anthropic_client is not None
        assert agent.client is None

    def test_fallback_to_anthropic_enables_prompt_caching(self, agent):
        agent._fallback_activated = False
        agent._fallback_model = {"provider": "anthropic", "model": "claude-sonnet-4-20250514"}
        stub = self._stub_provider_client("https://api.anthropic.com/v1", "sk-ant-api03-test")
        with (
            patch("agent.auxiliary_client.resolve_provider_client", return_value=(stub, None)),
            patch("agent.anthropic_adapter.build_anthropic_client", return_value=MagicMock()),
            patch("agent.anthropic_adapter.resolve_anthropic_token", return_value=None),
        ):
            agent._try_activate_fallback()
        assert agent._use_prompt_caching is True

    def test_fallback_to_openrouter_uses_openai_client(self, agent):
        # Non-anthropic providers keep the OpenAI-style client path.
        agent._fallback_activated = False
        agent._fallback_model = {"provider": "openrouter", "model": "anthropic/claude-sonnet-4"}
        stub = self._stub_provider_client("https://openrouter.ai/api/v1", "sk-or-test")
        with patch("agent.auxiliary_client.resolve_provider_client", return_value=(stub, None)):
            activated = agent._try_activate_fallback()
        assert activated is True
        assert agent.api_mode == "chat_completions"
        assert agent.client is stub
class TestAnthropicBaseUrlPassthrough:
    """Bug fix: base_url was filtered with 'anthropic in base_url', blocking proxies."""

    @staticmethod
    def _passed_base_url(mock_build):
        # Extract the base_url argument regardless of whether the
        # constructor called build_anthropic_client positionally or by
        # keyword, so the test does not break on a benign refactor.
        args, kwargs = mock_build.call_args
        if "base_url" in kwargs:
            return kwargs["base_url"]
        return args[1] if len(args) > 1 else None

    def test_custom_proxy_base_url_passed_through(self):
        with (
            patch("run_agent.get_tool_definitions", return_value=_make_tool_defs("web_search")),
            patch("run_agent.check_toolset_requirements", return_value={}),
            patch("agent.anthropic_adapter.build_anthropic_client") as mock_build,
        ):
            mock_build.return_value = MagicMock()
            # Construction side effect is what we test; the instance is not needed.
            AIAgent(
                api_key="sk-ant-api03-test1234567890",
                base_url="https://llm-proxy.company.com/v1",
                api_mode="anthropic_messages",
                quiet_mode=True,
                skip_context_files=True,
                skip_memory=True,
            )
            # base_url should be passed through, not filtered out
            assert self._passed_base_url(mock_build) == "https://llm-proxy.company.com/v1"

    def test_none_base_url_passed_as_none(self):
        with (
            patch("run_agent.get_tool_definitions", return_value=_make_tool_defs("web_search")),
            patch("run_agent.check_toolset_requirements", return_value={}),
            patch("agent.anthropic_adapter.build_anthropic_client") as mock_build,
        ):
            mock_build.return_value = MagicMock()
            AIAgent(
                api_key="sk-ant-api03-test1234567890",
                api_mode="anthropic_messages",
                quiet_mode=True,
                skip_context_files=True,
                skip_memory=True,
            )
            # No base_url provided: anything falsy (None or "") is acceptable.
            # (The original `or passed_url is None` clause was redundant —
            # None is already falsy.)
            assert not self._passed_base_url(mock_build)