Compare commits


2 Commits

Author SHA1 Message Date
7ac8d0268f test(cron): add tests for cloud-context warning injection
Some checks failed
Forge CI / smoke-and-build (pull_request) Failing after 1m13s
2026-04-14 01:18:16 +00:00
2e59f8540d fix(cron): inject cloud-context warning when prompt refs localhost
Fixes #378, Closes #456

When a cron job runs on a cloud endpoint but its prompt references
local services (Ollama, localhost ports, etc.), inject a SYSTEM NOTE
telling the agent it cannot reach localhost so it reports the
limitation instead of wasting iterations on doomed connections.
2026-04-14 01:17:25 +00:00
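The mechanism, end to end: _detect_local_service_refs scans the prompt against a small regex table, and _inject_cloud_context prepends the warning before dispatch. A minimal sketch of the intended behavior, assuming the private helpers import cleanly from cron.scheduler the way the new tests below do (the provider name "nous" is illustrative):

from cron.scheduler import _detect_local_service_refs, _inject_cloud_context

prompt = "Check Ollama is responding on localhost:11434"
refs = _detect_local_service_refs(prompt)  # e.g. ['localhost:11434', 'Check Ollama', ...]
if refs:  # run_job only injects when the resolved endpoint is cloud
    prompt = _inject_cloud_context(prompt, refs, "nous")
assert prompt.startswith("[SYSTEM NOTE")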
4 changed files with 151 additions and 190 deletions

View File

@@ -26,7 +26,7 @@ from cron.jobs import (
    trigger_job,
    JOBS_FILE,
)
from cron.scheduler import tick
from cron.scheduler import tick, ModelContextError, CRON_MIN_CONTEXT_TOKENS

__all__ = [
    "create_job",
@@ -39,4 +39,6 @@ __all__ = [
    "trigger_job",
    "tick",
    "JOBS_FILE",
    "ModelContextError",
    "CRON_MIN_CONTEXT_TOKENS",
]

View File

@@ -12,6 +12,7 @@ import asyncio
import concurrent.futures
import json
import logging
import re
import os
import subprocess
import sys
@@ -544,76 +545,58 @@ def _run_job_script(script_path: str) -> tuple[bool, str]:
    except Exception as exc:
        return False, f"Script execution failed: {exc}"

# ---------------------------------------------------------------------------
# Runtime classification & provider mismatch detection
# Cloud-context warning for local-service references (#378, #456)
# ---------------------------------------------------------------------------
_PROVIDER_ALIASES: dict[str, set[str]] = {
    "ollama": {"ollama", "local ollama", "localhost:11434"},
    "anthropic": {"anthropic", "claude", "sonnet", "opus", "haiku"},
    "nous": {"nous", "mimo", "nousresearch"},
    "openrouter": {"openrouter"},
    "kimi": {"kimi", "moonshot"},
    "openai": {"openai", "gpt", "codex"},
    "gemini": {"gemini", "google"},
}
_CLOUD_PREFIXES = frozenset({"nous", "openrouter", "anthropic", "openai", "zai", "kimi", "gemini", "minimax"})
_LOCAL_SERVICE_PATTERNS = [
    re.compile(r'localhost:\d+', re.IGNORECASE),
    re.compile(r'127\.0\.0\.1:\d+'),
    re.compile(r'check\s+ollama', re.IGNORECASE),
    re.compile(r'ollama\s+(is\s+)?respond', re.IGNORECASE),
    re.compile(r'curl\s+localhost', re.IGNORECASE),
    re.compile(r'curl\s+127\.', re.IGNORECASE),
    re.compile(r'curl\s+local', re.IGNORECASE),
    re.compile(r'ping\s+localhost', re.IGNORECASE),
    re.compile(r'poll(ing)?\s+local', re.IGNORECASE),
    re.compile(r'check\s+service\s+respond', re.IGNORECASE),
    re.compile(r'11434'),  # Ollama default port
    re.compile(r'11435'),  # common alt Ollama port
]
def _classify_runtime(provider: str, model: str) -> str:
    """Return 'local' | 'cloud' | 'unknown'."""
    p = (provider or "").strip().lower()
    m = (model or "").strip().lower()
    if p and p not in ("ollama", "local"):
        return "cloud"
    if "/" in m and m.split("/")[0] in _CLOUD_PREFIXES:
        return "cloud"
    if p in ("ollama", "local") or (not p and m):
        return "local"
    return "unknown"

def _detect_local_service_refs(prompt: str) -> list[str]:
    """Return list of local-service reference descriptions found in prompt."""
    refs = []
    for pat in _LOCAL_SERVICE_PATTERNS:
        m = pat.search(prompt)
        if m:
            refs.append(m.group(0))
    return refs

def _detect_provider_mismatch(prompt: str, active_provider: str) -> Optional[str]:
    """Return stale provider group referenced in prompt, or None."""
    if not active_provider or not prompt:
        return None
    prompt_lower = prompt.lower()
    active_lower = active_provider.lower().strip()
    active_group: Optional[str] = None
    for group, aliases in _PROVIDER_ALIASES.items():
        if active_lower in aliases or active_lower.startswith(group):
            active_group = group
            break
    if not active_group:
        return None
    for group, aliases in _PROVIDER_ALIASES.items():
        if group == active_group:
            continue
        for alias in aliases:
            if alias in prompt_lower:
                return group
    return None

def _inject_cloud_context(prompt: str, refs: list[str], provider: str) -> str:
    """Prepend a SYSTEM NOTE so the agent knows it cannot reach localhost."""
    refs_str = ", ".join(f'"{r}"' for r in refs)
    warning = (
        "[SYSTEM NOTE — cloud endpoint]\n"
        f"You are running on a cloud inference endpoint ({provider}). "
        f"Your prompt references local services: {refs_str}. "
        "You CANNOT reach localhost or any local network address from this endpoint. "
        "Do NOT attempt curl, ping, SSH, or any network calls to localhost. "
        "Instead, report to the user that this job requires a local model endpoint "
        "to check local services, and suggest they re-run with a local provider.\n"
    )
    return warning + prompt

# ---------------------------------------------------------------------------
# Prompt builder
# ---------------------------------------------------------------------------
def _build_job_prompt(
    job: dict,
    *,
    runtime_model: str = "",
    runtime_provider: str = "",
) -> str:
    """Build the effective prompt for a cron job.

    Args:
        job: The cron job dict.
        runtime_model: Resolved model name (e.g. "xiaomi/mimo-v2-pro").
        runtime_provider: Resolved provider name (e.g. "nous", "openrouter").
    """
def _build_job_prompt(job: dict) -> str:
    """Build the effective prompt for a cron job, optionally loading one or more skills first."""
    prompt = job.get("prompt", "")
    skills = job.get("skills")
@@ -643,33 +626,6 @@ def _build_job_prompt(
f"{prompt}"
)
    # Runtime context injection — tells the agent what it can actually do.
    _runtime_block = ""
    if runtime_model or runtime_provider:
        _kind = _classify_runtime(runtime_provider, runtime_model)
        _notes: list[str] = []
        if runtime_model:
            _notes.append(f"MODEL: {runtime_model}")
        if runtime_provider:
            _notes.append(f"PROVIDER: {runtime_provider}")
        if _kind == "local":
            _notes.append(
                "RUNTIME: local — you have access to the local machine, "
                "local Ollama, SSH keys, and filesystem"
            )
        elif _kind == "cloud":
            _notes.append(
                "RUNTIME: cloud API — you do NOT have local machine access. "
                "Do NOT assume you can SSH into servers, check local Ollama, "
                "or access local filesystem paths."
            )
        if _notes:
            _runtime_block = (
                "[SYSTEM: RUNTIME CONTEXT — "
                + "; ".join(_notes)
                + ". Adjust your approach based on these capabilities.]\\n\\n"
            )

    # Always prepend cron execution guidance so the agent knows how
    # delivery works and can suppress delivery when appropriate.
    cron_hint = (
@@ -689,9 +645,9 @@ def _build_job_prompt(
"response. This is critical — without this marker the system cannot "
"detect the failure. Examples: "
"\"[SCRIPT_FAILED]: forge.alexanderwhitestone.com timed out\" "
"\\\"[SCRIPT_FAILED]: script exited with code 1\\\".]\\\\n\\\\n"
"\"[SCRIPT_FAILED]: script exited with code 1\".]\\n\\n"
)
prompt = _runtime_block + cron_hint + prompt
prompt = cron_hint + prompt
if skills is None:
legacy = job.get("skill")
skills = [legacy] if legacy else []
@@ -761,32 +717,7 @@ def run_job(job: dict) -> tuple[bool, str, str, Optional[str]]:
job_id = job["id"]
job_name = job["name"]
# Early model/provider resolution for runtime context injection
_early_model = job.get("model") or os.getenv("HERMES_MODEL") or ""
_early_provider = os.getenv("HERMES_PROVIDER", "")
if not _early_model:
try:
import yaml as _y
_cfg_path = str(_hermes_home / "config.yaml")
if os.path.exists(_cfg_path):
with open(_cfg_path) as _f:
_cfg_early = _y.safe_load(_f) or {}
_mc = _cfg_early.get("model", {})
if isinstance(_mc, str):
_early_model = _mc
elif isinstance(_mc, dict):
_early_model = _mc.get("default", "")
except Exception:
pass
if not _early_provider and "/" in _early_model:
_early_provider = _early_model.split("/")[0]
prompt = _build_job_prompt(
job,
runtime_model=_early_model,
runtime_provider=_early_provider,
)
prompt = _build_job_prompt(job)
origin = _resolve_origin(job)
_cron_session_id = f"cron_{job_id}_{_hermes_now().strftime('%Y%m%d_%H%M%S')}"
@@ -898,17 +829,6 @@ def run_job(job: dict) -> tuple[bool, str, str, Optional[str]]:
        message = format_runtime_provider_error(exc)
        raise RuntimeError(message) from exc

    # Provider mismatch warning
    _resolved_provider = runtime.get("provider", "") or ""
    _raw_prompt = job.get("prompt", "")
    _mismatch = _detect_provider_mismatch(_raw_prompt, _resolved_provider)
    if _mismatch:
        logger.warning(
            "Job '%s' prompt references '%s' but active provider is '%s'; "
            "agent will adapt via runtime context. Consider updating prompt.",
            job_name, _mismatch, _resolved_provider,
        )
    from agent.smart_model_routing import resolve_turn_route
    turn_route = resolve_turn_route(
        prompt,
@@ -947,6 +867,18 @@ def run_job(job: dict) -> tuple[bool, str, str, Optional[str]]:
        job_name,
    )

    # Inject cloud-context warning when prompt references local services (#378)
    if _is_cloud:
        _local_refs = _detect_local_service_refs(prompt)
        if _local_refs:
            _provider_name = turn_route["runtime"].get("provider", "cloud")
            prompt = _inject_cloud_context(prompt, _local_refs, _provider_name)
            logger.info(
                "Job '%s': injected cloud-context warning for local refs: %s",
                job_name,
                _local_refs,
            )

    _agent_kwargs = _safe_agent_kwargs({
        "model": turn_route["model"],
        "api_key": turn_route["runtime"].get("api_key"),

View File

@@ -0,0 +1,91 @@
"""Tests for cloud-context warning injection (#378, #456)."""
import pytest
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent))
from cron.scheduler import (
_LOCAL_SERVICE_PATTERNS,
_detect_local_service_refs,
_inject_cloud_context,
)
class TestDetectLocalServiceRefs:
"""Pattern detection for local service references in prompts."""
def test_localhost_with_port(self):
refs = _detect_local_service_refs("Check localhost:11434 is up")
assert len(refs) >= 1
assert any("11434" in r for r in refs)
def test_127_with_port(self):
refs = _detect_local_service_refs("curl http://127.0.0.1:8080/health")
assert len(refs) >= 1
def test_check_ollama(self):
refs = _detect_local_service_refs("Check Ollama is responding")
assert len(refs) >= 1
def test_ollama_responding(self):
refs = _detect_local_service_refs("Verify Ollama responding on this machine")
assert len(refs) >= 1
def test_curl_localhost(self):
refs = _detect_local_service_refs("curl localhost and report status")
assert len(refs) >= 1
def test_ping_localhost(self):
refs = _detect_local_service_refs("ping localhost to check connectivity")
assert len(refs) >= 1
def test_no_false_positive_cloud(self):
refs = _detect_local_service_refs("Check the weather in Paris today")
assert len(refs) == 0
def test_no_false_positive_api(self):
refs = _detect_local_service_refs("Call the OpenRouter API endpoint")
assert len(refs) == 0
def test_multiple_refs(self):
refs = _detect_local_service_refs("curl localhost:11434 then ping localhost")
assert len(refs) >= 2
class TestInjectCloudContext:
"""Cloud-context warning injection."""
def test_prepends_warning(self):
prompt = "Check Ollama is responding"
result = _inject_cloud_context(prompt, ["Check Ollama"], "nous")
assert result.startswith("[SYSTEM NOTE")
assert "nous" in result
assert prompt in result
def test_preserves_original_prompt(self):
prompt = "Check Ollama at localhost:11434"
result = _inject_cloud_context(prompt, ["localhost:11434"], "openrouter")
assert prompt in result
def test_mentions_cannot_reach(self):
prompt = "curl localhost"
result = _inject_cloud_context(prompt, ["curl localhost"], "nous")
assert "CANNOT reach" in result or "cannot reach" in result
def test_suggests_local_provider(self):
prompt = "Check Ollama"
result = _inject_cloud_context(prompt, ["Check Ollama"], "nous")
assert "local" in result.lower()
class TestCloudBypassLocal:
"""Local endpoints should not trigger injection."""
def test_local_endpoint_skips(self):
# The caller checks _is_cloud before calling _detect_local_service_refs
# so this is tested at integration level. Here we verify detection
# still finds refs (the bypass is the caller\'s responsibility).
refs = _detect_local_service_refs("Check Ollama at localhost:11434")
assert len(refs) > 0 # Detection works, caller decides whether to inject
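
One gap the new suite leaves open: several entries in _LOCAL_SERVICE_PATTERNS (the poll/local, check-service-respond, and curl 127. patterns) have no dedicated case. A hedged sketch of a possible table-driven addition, not part of this commit:

import pytest
from cron.scheduler import _detect_local_service_refs

@pytest.mark.parametrize("prompt", [
    "polling local services every minute",
    "check service responding on the box",
    "curl 127.0.0.1 for a health check",
])
def test_remaining_patterns_detect(prompt):
    # Each prompt should trip at least one regex in _LOCAL_SERVICE_PATTERNS.
    assert _detect_local_service_refs(prompt)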

View File

@@ -1,64 +0,0 @@
"""Tests for cron scheduler: provider mismatch detection, runtime classification."""
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent.parent))
def _import_scheduler():
import importlib.util
spec = importlib.util.spec_from_file_location(
"cron.scheduler", str(Path(__file__).resolve().parent.parent / "cron" / "scheduler.py"),
)
mod = importlib.util.module_from_spec(spec)
try:
spec.loader.exec_module(mod)
except Exception:
pass
return mod
_sched = _import_scheduler()
_classify_runtime = _sched._classify_runtime
_detect_provider_mismatch = _sched._detect_provider_mismatch
_build_job_prompt = _sched._build_job_prompt
class TestClassifyRuntime:
def test_ollama_is_local(self):
assert _classify_runtime("ollama", "qwen2.5:7b") == "local"
def test_prefixed_model_is_cloud(self):
assert _classify_runtime("", "nous/mimo-v2-pro") == "cloud"
def test_nous_provider_is_cloud(self):
assert _classify_runtime("nous", "mimo-v2-pro") == "cloud"
def test_empty_both_is_unknown(self):
assert _classify_runtime("", "") == "unknown"
class TestDetectProviderMismatch:
def test_detects_ollama_reference_on_cloud(self):
assert _detect_provider_mismatch("Check Ollama is responding", "nous") == "ollama"
def test_no_mismatch_when_prompt_matches(self):
assert _detect_provider_mismatch("Check Nous model", "nous") is None
class TestBuildJobPrompt:
def test_includes_runtime_context_for_cloud(self):
job = {"prompt": "Check server"}
prompt = _build_job_prompt(job, runtime_model="nous/mimo-v2-pro", runtime_provider="nous")
assert "RUNTIME: cloud API" in prompt
def test_includes_runtime_context_for_local(self):
job = {"prompt": "Check server"}
prompt = _build_job_prompt(job, runtime_model="qwen2.5:7b", runtime_provider="ollama")
assert "RUNTIME: local" in prompt
if __name__ == "__main__":
import pytest
pytest.main([__file__, "-v"])