46 lines
1.6 KiB
Bash
46 lines
1.6 KiB
Bash
|
|
#!/bin/bash
# Let Gemini-Timmy configure itself as Anthropic fallback.
# Hermes CLI won't accept --provider custom, so we use hermes setup flow.
# But first: prove Gemini works, then manually add fallback_model.

# Add Google Gemini as custom_provider + fallback_model in one shot.
# The heredoc is quoted ('PYEOF') so the Python source is passed verbatim
# with no shell expansion.
python3 << 'PYEOF'
"""Idempotently register Google Gemini in the Hermes CLI config.

Adds a "Google Gemini" entry to custom_providers and a fallback_model
block pointing at gemini-2.5-pro, then rewrites ~/.hermes/config.yaml.
Safe to re-run: existing entries are left untouched.
"""
import os
import sys

import yaml

config_path = os.path.expanduser("~/.hermes/config.yaml")
try:
    with open(config_path) as f:
        # An empty config file makes safe_load() return None, which would
        # crash every .get() below -- normalize to an empty dict.
        config = yaml.safe_load(f) or {}
except FileNotFoundError:
    # Exit with a clear diagnostic instead of a raw traceback.
    sys.exit(f"Config not found: {config_path} -- run hermes setup first")

# 1. Add Gemini to custom_providers if missing.
#    `or []` also covers an explicit `custom_providers: null` in the YAML.
providers = config.get("custom_providers", []) or []
has_gemini = any("gemini" in (p.get("name", "").lower()) for p in providers)
if not has_gemini:
    providers.append({
        "name": "Google Gemini",
        "base_url": "https://generativelanguage.googleapis.com/v1beta/openai",
        "api_key_env": "GEMINI_API_KEY",
        "model": "gemini-2.5-pro",
    })
    config["custom_providers"] = providers
    print("+ Added Google Gemini custom provider")

# 2. Add fallback_model block if missing (treat null/empty as missing).
if "fallback_model" not in config or not config.get("fallback_model"):
    config["fallback_model"] = {
        "provider": "custom",
        "model": "gemini-2.5-pro",
        "base_url": "https://generativelanguage.googleapis.com/v1beta/openai",
        "api_key_env": "GEMINI_API_KEY",
    }
    print("+ Added fallback_model -> gemini-2.5-pro")
else:
    print("= fallback_model already configured")

# Persist the merged config; block style + original key order keeps the
# file diff-friendly for hand editing.
with open(config_path, "w") as f:
    yaml.dump(config, f, default_flow_style=False, sort_keys=False)

print("\nDone. When Anthropic quota exhausts, Hermes will failover to Gemini 2.5 Pro.")
print("Primary: claude-opus-4-6 (Anthropic)")
print("Fallback: gemini-2.5-pro (Google AI)")
PYEOF