Compare commits

...

1 Commits

Author SHA1 Message Date
40791bb5d9 fix(cron): inject cloud-context warning when prompt refs localhost (#378)
Some checks failed
Forge CI / smoke-and-build (pull_request) Failing after 58s
Health Monitor cron job prompts reference 'Check Ollama' and
'localhost:11434' but run on cloud models (nous/mimo-v2-pro) that
cannot reach local services. The agent wastes iterations on doomed
localhost calls.

Fix: After resolving the runtime endpoint, detect local service
references in the prompt. When the endpoint is cloud (not local),
inject a SYSTEM NOTE telling the agent it cannot reach localhost
and to report the configuration issue instead.

Changes:
- cron/scheduler.py: Add re import, _LOCAL_SERVICE_PATTERNS,
  _detect_local_service_refs(), _inject_cloud_context(). Call
  _inject_cloud_context() after resolve_turn_route() in run_job.
- tests/test_cron_cloud_context.py: 15 tests covering pattern
  detection, cloud injection, local endpoint bypass.

Closes #378
2026-04-13 21:01:33 -04:00
2 changed files with 199 additions and 0 deletions

View File

@@ -13,6 +13,7 @@ import concurrent.futures
import json
import logging
import os
import re
import subprocess
import sys
@@ -643,6 +644,59 @@ def _build_job_prompt(job: dict) -> str:
return "\n".join(parts)
# Patterns that indicate a prompt requires local/localhost service access
_LOCAL_SERVICE_PATTERNS = [
re.compile(r"localhost:\d+", re.IGNORECASE),
re.compile(r"127\.0\.0\.1:\d+", re.IGNORECASE),
re.compile(r"\bcheck\b.*\bollama\b", re.IGNORECASE),
re.compile(r"\bollama\b.*\brespond", re.IGNORECASE),
re.compile(r"\bcurl\b.*\blocal", re.IGNORECASE),
re.compile(r"\bcurl\b.*\b127\.", re.IGNORECASE),
re.compile(r"\bcurl\b.*\blocalhost", re.IGNORECASE),
re.compile(r"\bpolling\b.*\blocal", re.IGNORECASE),
re.compile(r"\bping\b.*\blocalhost", re.IGNORECASE),
re.compile(r"\bcheck.*\bservice\b.*\brespond", re.IGNORECASE),
]
def _detect_local_service_refs(prompt: str) -> list[str]:
"""Return list of matched local-service references in the prompt."""
matches = []
for pat in _LOCAL_SERVICE_PATTERNS:
found = pat.findall(prompt)
if found:
matches.extend(found[:2]) # Limit per pattern
return matches
def _inject_cloud_context(prompt: str, base_url: str) -> str:
    """If prompt references local services but runtime is cloud, inject a warning.

    The agent sees this as a system note and can report the mismatch instead of
    wasting iterations on doomed localhost calls.
    """
    from agent.model_metadata import is_local_endpoint

    # A local runtime can genuinely reach localhost — nothing to warn about.
    if is_local_endpoint(base_url):
        return prompt

    local_refs = _detect_local_service_refs(prompt)
    if not local_refs:
        # Prompt never mentions local services — leave it untouched.
        return prompt

    # Quote at most five references so the note stays short and readable.
    quoted_refs = ", ".join(f"'{ref}'" for ref in local_refs[:5])
    system_note = (
        "[SYSTEM NOTE: You are running on a cloud inference endpoint "
        f"({base_url or 'cloud'}) which cannot reach localhost or local services. "
        f"Your prompt references local services: {quoted_refs}. "
        "You cannot curl/ping/SSH to localhost from this environment. "
        "Report this as a configuration issue: the job should either be pinned "
        "to a local provider (e.g. ollama at localhost:11434) or the prompt "
        "should be rewritten to not assume local access. "
        "Do NOT attempt localhost connections — report the limitation.]\n\n"
    )
    return system_note + prompt
def run_job(job: dict) -> tuple[bool, str, str, Optional[str]]:
"""
Execute a single cron job.
@@ -794,6 +848,11 @@ def run_job(job: dict) -> tuple[bool, str, str, Optional[str]]:
},
)
# Inject cloud context warning if prompt references local services
# but the runtime is a cloud endpoint (#378)
_resolved_base_url = turn_route["runtime"].get("base_url", "")
prompt = _inject_cloud_context(prompt, _resolved_base_url)
# Build disabled toolsets — always exclude cronjob/messaging/clarify
# for cron sessions. When the runtime endpoint is cloud (not local),
# also disable terminal so the agent does not attempt SSH or shell

View File

@@ -0,0 +1,140 @@
"""Tests for cron prompt cloud-context injection (#378).
When a cron job prompt references localhost/Ollama but the runtime is a
cloud endpoint, a SYSTEM NOTE warning must be injected so the agent reports
the configuration issue instead of wasting iterations on doomed calls.
"""
import re
import pytest
# Mirror the patterns from cron/scheduler.py for isolated testing
_LOCAL_SERVICE_PATTERNS = [
re.compile(r"localhost:\d+", re.IGNORECASE),
re.compile(r"127\.0\.0\.1:\d+", re.IGNORECASE),
re.compile(r"\bcheck\b.*\bollama\b", re.IGNORECASE),
re.compile(r"\bollama\b.*\brespond", re.IGNORECASE),
re.compile(r"\bcurl\b.*\blocal", re.IGNORECASE),
re.compile(r"\bcurl\b.*\b127\.", re.IGNORECASE),
re.compile(r"\bcurl\b.*\blocalhost", re.IGNORECASE),
re.compile(r"\bpolling\b.*\blocal", re.IGNORECASE),
re.compile(r"\bping\b.*\blocalhost", re.IGNORECASE),
re.compile(r"\bcheck.*\bservice\b.*\brespond", re.IGNORECASE),
]
def _detect_local_service_refs(prompt: str) -> list[str]:
matches = []
for pat in _LOCAL_SERVICE_PATTERNS:
found = pat.findall(prompt)
if found:
matches.extend(found[:2])
return matches
def _is_local_endpoint(base_url: str) -> bool:
"""Mirror agent.model_metadata.is_local_endpoint for test isolation."""
if not base_url:
return False
from urllib.parse import urlparse
parsed = urlparse(base_url)
host = (parsed.hostname or "").lower()
return host in ("localhost", "127.0.0.1", "0.0.0.0") or (
host.startswith("10.") or host.startswith("192.168.") or
any(host.startswith(f"172.{i}.") for i in range(16, 32))
)
def _inject_cloud_context(prompt: str, base_url: str) -> str:
    """Mirror of the scheduler's cloud-context injection for test isolation."""
    # Local runtimes can reach localhost; nothing to inject.
    if _is_local_endpoint(base_url):
        return prompt

    local_refs = _detect_local_service_refs(prompt)
    if not local_refs:
        return prompt

    quoted_refs = ", ".join(f"'{ref}'" for ref in local_refs[:5])
    system_note = (
        "[SYSTEM NOTE: You are running on a cloud inference endpoint "
        f"({base_url or 'cloud'}) which cannot reach localhost or local services. "
        f"Your prompt references local services: {quoted_refs}. "
        "You cannot curl/ping/SSH to localhost from this environment. "
        "Report this as a configuration issue: the job should either be pinned "
        "to a local provider (e.g. ollama at localhost:11434) or the prompt "
        "should be rewritten to not assume local access. "
        "Do NOT attempt localhost connections — report the limitation.]\n\n"
    )
    return system_note + prompt
class TestDetectLocalServiceRefs:
    """Pattern-level detection behavior of _detect_local_service_refs."""

    def test_localhost_port(self):
        found = _detect_local_service_refs("Check http://localhost:11434/health")
        assert any("localhost:11434" in ref for ref in found)

    def test_127_port(self):
        found = _detect_local_service_refs("curl http://127.0.0.1:8080/api")
        assert any("127.0.0.1:8080" in ref for ref in found)

    def test_check_ollama(self):
        found = _detect_local_service_refs("Check Ollama is responding on this host")
        assert found

    def test_ollama_responding(self):
        found = _detect_local_service_refs("Verify that Ollama responding to requests")
        assert found

    def test_curl_localhost(self):
        found = _detect_local_service_refs("curl localhost:11434/api/tags")
        assert any("localhost:11434" in ref for ref in found)

    def test_ping_localhost(self):
        found = _detect_local_service_refs("ping localhost to check connectivity")
        assert found

    def test_no_match_normal(self):
        # Ordinary prompts must not trigger injection.
        assert _detect_local_service_refs("Check the weather in New York") == []

    def test_no_match_forge(self):
        # Remote hostnames are not local services.
        assert _detect_local_service_refs("Check forge.alexanderwhitestone.com for issues") == []
class TestInjectCloudContext:
    """End-to-end injection behavior of _inject_cloud_context."""

    def test_injects_on_cloud_with_local_refs(self):
        out = _inject_cloud_context(
            "Check Ollama is responding at localhost:11434",
            "https://inference-api.nousresearch.com/v1",
        )
        assert "SYSTEM NOTE" in out
        assert "cannot reach localhost" in out
        assert "Check Ollama" in out

    def test_no_inject_on_local_endpoint(self):
        original = "Check Ollama is responding at localhost:11434"
        out = _inject_cloud_context(original, "http://localhost:11434/v1")
        assert "SYSTEM NOTE" not in out
        assert out == original

    def test_no_inject_without_local_refs(self):
        out = _inject_cloud_context(
            "Check the forge for open issues", "https://openrouter.ai/api/v1"
        )
        assert "SYSTEM NOTE" not in out

    def test_injects_on_empty_url_with_refs(self):
        # An unknown/empty base_url is treated as cloud.
        out = _inject_cloud_context("Check Ollama is responding", "")
        assert "SYSTEM NOTE" in out

    def test_preserves_full_prompt(self):
        out = _inject_cloud_context(
            "You are the Health Monitor. Check Ollama. Verify forge.",
            "https://api.anthropic.com",
        )
        assert "You are the Health Monitor" in out
        assert "Verify forge" in out

    def test_includes_provider_url(self):
        out = _inject_cloud_context(
            "curl localhost:11434/api/tags", "https://openrouter.ai/api/v1"
        )
        assert "openrouter.ai" in out

    def test_rfc1918_treated_as_local(self):
        original = "curl localhost:11434/api/tags"
        out = _inject_cloud_context(original, "http://192.168.1.100:11434/v1")
        # No injection — RFC-1918 is local
        assert out == original