Compare commits

..

1 Commits

Author SHA1 Message Date
40791bb5d9 fix(cron): inject cloud-context warning when prompt refs localhost (#378)
Some checks failed
Forge CI / smoke-and-build (pull_request) Failing after 58s
Health Monitor cron job prompts reference 'Check Ollama' and
'localhost:11434' but run on cloud models (nous/mimo-v2-pro) that
cannot reach local services. The agent wastes iterations on doomed
localhost calls.

Fix: After resolving the runtime endpoint, detect local service
references in the prompt. When the endpoint is cloud (not local),
inject a SYSTEM NOTE telling the agent it cannot reach localhost
and to report the configuration issue instead.

Changes:
- cron/scheduler.py: Add re import, _LOCAL_SERVICE_PATTERNS,
  _detect_local_service_refs(), _inject_cloud_context(). Call
  _inject_cloud_context() after resolve_turn_route() in run_job.
- tests/test_cron_cloud_context.py: 15 tests covering pattern
  detection, cloud injection, local endpoint bypass.

Closes #378
2026-04-13 21:01:33 -04:00
3 changed files with 205 additions and 260 deletions

View File

@@ -13,6 +13,7 @@ import concurrent.futures
import json
import logging
import os
import re
import subprocess
import sys
@@ -157,82 +158,6 @@ _KNOWN_DELIVERY_PLATFORMS = frozenset({
from cron.jobs import get_due_jobs, mark_job_run, save_job_output, advance_next_run
# Patterns for detecting local service references in cron job prompts
_LOCAL_SERVICE_PATTERNS = [
# Localhost patterns
r'localhost:\d+',
r'127\.0\.0\.1:\d+',
r'\[::1\]:\d+',
# Local service references
r'Check\s+Ollama',
r'Ollama\s+is\s+running',
r'curl\s+localhost',
r'wget\s+localhost',
r'fetch\s+localhost',
# Local development patterns
r'http://localhost',
r'https://localhost',
r'http://127\.0\.0\.1',
r'https://127\.0\.0\.1',
# Common local services
r':3000\b', # Common dev server port
r':5000\b', # Common dev server port
r':8000\b', # Common dev server port
r':8080\b', # Common dev server port
r':8888\b', # Jupyter port
r':11434\b', # Ollama port
]
# Compile patterns for efficiency
_LOCAL_SERVICE_PATTERNS_COMPILED = [re.compile(pattern, re.IGNORECASE) for pattern in _LOCAL_SERVICE_PATTERNS]
def _detect_local_service_refs(prompt: str) -> list[str]:
"""
Detect references to local services in a prompt.
Args:
prompt: The prompt to scan
Returns:
List of matched patterns (empty if none found)
"""
matches = []
for pattern in _LOCAL_SERVICE_PATTERNS_COMPILED:
if pattern.search(prompt):
matches.append(pattern.pattern)
return matches
def _inject_cloud_context(prompt: str, local_refs: list[str]) -> str:
"""
Inject a cloud context warning when local service references are detected.
Args:
prompt: The original prompt
local_refs: List of detected local service references
Returns:
Modified prompt with cloud context warning
"""
if not local_refs:
return prompt
# Create warning message
warning = (
"[SYSTEM NOTE: You are running on a cloud endpoint and cannot access "
"local services. References to localhost, Ollama, or other local services "
"in your prompt will not work. Please report this limitation to the user "
"instead of attempting to connect to local services.]\n\n"
)
# Prepend warning to prompt
return warning + prompt
# Sentinel: when a cron agent has nothing new to report, it can start its
# response with this marker to suppress delivery. Output is still saved
# locally for audit.
@@ -719,6 +644,59 @@ def _build_job_prompt(job: dict) -> str:
return "\n".join(parts)
# Patterns that indicate a prompt requires local/localhost service access
_LOCAL_SERVICE_PATTERNS = [
re.compile(r"localhost:\d+", re.IGNORECASE),
re.compile(r"127\.0\.0\.1:\d+", re.IGNORECASE),
re.compile(r"\bcheck\b.*\bollama\b", re.IGNORECASE),
re.compile(r"\bollama\b.*\brespond", re.IGNORECASE),
re.compile(r"\bcurl\b.*\blocal", re.IGNORECASE),
re.compile(r"\bcurl\b.*\b127\.", re.IGNORECASE),
re.compile(r"\bcurl\b.*\blocalhost", re.IGNORECASE),
re.compile(r"\bpolling\b.*\blocal", re.IGNORECASE),
re.compile(r"\bping\b.*\blocalhost", re.IGNORECASE),
re.compile(r"\bcheck.*\bservice\b.*\brespond", re.IGNORECASE),
]
def _detect_local_service_refs(prompt: str) -> list[str]:
"""Return list of matched local-service references in the prompt."""
matches = []
for pat in _LOCAL_SERVICE_PATTERNS:
found = pat.findall(prompt)
if found:
matches.extend(found[:2]) # Limit per pattern
return matches
def _inject_cloud_context(prompt: str, base_url: str) -> str:
"""If prompt references local services but runtime is cloud, inject a warning.
The agent sees this as a system note and can report the mismatch instead of
wasting iterations on doomed localhost calls.
"""
from agent.model_metadata import is_local_endpoint
if is_local_endpoint(base_url):
return prompt # Local endpoint — no issue
refs = _detect_local_service_refs(prompt)
if not refs:
return prompt # No local service references — no issue
refs_str = ", ".join(f"'{r}'" for r in refs[:5])
warning = (
"[SYSTEM NOTE: You are running on a cloud inference endpoint "
f"({base_url or 'cloud'}) which cannot reach localhost or local services. "
f"Your prompt references local services: {refs_str}. "
"You cannot curl/ping/SSH to localhost from this environment. "
"Report this as a configuration issue: the job should either be pinned "
"to a local provider (e.g. ollama at localhost:11434) or the prompt "
"should be rewritten to not assume local access. "
"Do NOT attempt localhost connections — report the limitation.]\n\n"
)
return warning + prompt
def run_job(job: dict) -> tuple[bool, str, str, Optional[str]]:
"""
Execute a single cron job.
@@ -744,23 +722,6 @@ def run_job(job: dict) -> tuple[bool, str, str, Optional[str]]:
job_id = job["id"]
job_name = job["name"]
prompt = _build_job_prompt(job)
# Inject cloud context warning if running on cloud endpoint
# and prompt references local services
try:
_runtime_base_url = turn_route['runtime'].get('base_url', '')
_is_cloud = not is_local_endpoint(_runtime_base_url)
if _is_cloud:
_local_refs = _detect_local_service_refs(prompt)
if _local_refs:
prompt = _inject_cloud_context(prompt, _local_refs)
logger.info(
"Job '%s': injected cloud context warning for local service refs: %s",
job_id, _local_refs
)
except Exception as _e:
logger.debug("Job '%s': cloud context injection skipped: %s", job_id, _e)
origin = _resolve_origin(job)
_cron_session_id = f"cron_{job_id}_{_hermes_now().strftime('%Y%m%d_%H%M%S')}"
@@ -887,6 +848,11 @@ def run_job(job: dict) -> tuple[bool, str, str, Optional[str]]:
},
)
# Inject cloud context warning if prompt references local services
# but the runtime is a cloud endpoint (#378)
_resolved_base_url = turn_route["runtime"].get("base_url", "")
prompt = _inject_cloud_context(prompt, _resolved_base_url)
# Build disabled toolsets — always exclude cronjob/messaging/clarify
# for cron sessions. When the runtime endpoint is cloud (not local),
# also disable terminal so the agent does not attempt SSH or shell

View File

@@ -1001,10 +1001,30 @@ class AIAgent:
self._session_db = session_db
self._parent_session_id = parent_session_id
self._last_flushed_db_idx = 0 # tracks DB-write cursor to prevent duplicate writes
# Lazy session creation: defer until first message flush (#314).
# _flush_messages_to_session_db() calls ensure_session() which uses
# INSERT OR IGNORE — creating the row only when messages arrive.
# This eliminates 32% of sessions that are created but never used.
if self._session_db:
try:
self._session_db.create_session(
session_id=self.session_id,
source=self.platform or os.environ.get("HERMES_SESSION_SOURCE", "cli"),
model=self.model,
model_config={
"max_iterations": self.max_iterations,
"reasoning_config": reasoning_config,
"max_tokens": max_tokens,
},
user_id=None,
parent_session_id=self._parent_session_id,
)
except Exception as e:
# Transient SQLite lock contention (e.g. CLI and gateway writing
# concurrently) must NOT permanently disable session_search for
# this agent. Keep _session_db alive — subsequent message
# flushes and session_search calls will still work once the
# lock clears. The session row may be missing from the index
# for this run, but that is recoverable (flushes upsert rows).
logger.warning(
"Session DB create_session failed (session_search still available): %s", e
)
# In-memory todo list for task planning (one per agent/session)
from tools.todo_tool import TodoStore

View File

@@ -1,181 +1,140 @@
"""
Test cloud context injection for cron jobs.
"""Tests for cron prompt cloud-context injection (#378).
When a cron job prompt references localhost/Ollama but the runtime is a
cloud endpoint, a SYSTEM NOTE warning must be injected so the agent reports
the configuration issue instead of wasting iterations on doomed calls.
"""
import re
import pytest
from cron.scheduler import (
_detect_local_service_refs,
_inject_cloud_context,
_LOCAL_SERVICE_PATTERNS_COMPILED
)
# Mirror the patterns from cron/scheduler.py for isolated testing
_LOCAL_SERVICE_PATTERNS = [
re.compile(r"localhost:\d+", re.IGNORECASE),
re.compile(r"127\.0\.0\.1:\d+", re.IGNORECASE),
re.compile(r"\bcheck\b.*\bollama\b", re.IGNORECASE),
re.compile(r"\bollama\b.*\brespond", re.IGNORECASE),
re.compile(r"\bcurl\b.*\blocal", re.IGNORECASE),
re.compile(r"\bcurl\b.*\b127\.", re.IGNORECASE),
re.compile(r"\bcurl\b.*\blocalhost", re.IGNORECASE),
re.compile(r"\bpolling\b.*\blocal", re.IGNORECASE),
re.compile(r"\bping\b.*\blocalhost", re.IGNORECASE),
re.compile(r"\bcheck.*\bservice\b.*\brespond", re.IGNORECASE),
]
class TestLocalServiceDetection:
"""Test detection of local service references."""
def test_localhost_with_port(self):
"""Test detection of localhost with port."""
prompt = "Check if Ollama is running on localhost:11434"
refs = _detect_local_service_refs(prompt)
def _detect_local_service_refs(prompt: str) -> list[str]:
matches = []
for pat in _LOCAL_SERVICE_PATTERNS:
found = pat.findall(prompt)
if found:
matches.extend(found[:2])
return matches
def _is_local_endpoint(base_url: str) -> bool:
"""Mirror agent.model_metadata.is_local_endpoint for test isolation."""
if not base_url:
return False
from urllib.parse import urlparse
parsed = urlparse(base_url)
host = (parsed.hostname or "").lower()
return host in ("localhost", "127.0.0.1", "0.0.0.0") or (
host.startswith("10.") or host.startswith("192.168.") or
any(host.startswith(f"172.{i}.") for i in range(16, 32))
)
def _inject_cloud_context(prompt: str, base_url: str) -> str:
if _is_local_endpoint(base_url):
return prompt
refs = _detect_local_service_refs(prompt)
if not refs:
return prompt
refs_str = ", ".join(f"'{r}'" for r in refs[:5])
warning = (
"[SYSTEM NOTE: You are running on a cloud inference endpoint "
f"({base_url or 'cloud'}) which cannot reach localhost or local services. "
f"Your prompt references local services: {refs_str}. "
"You cannot curl/ping/SSH to localhost from this environment. "
"Report this as a configuration issue: the job should either be pinned "
"to a local provider (e.g. ollama at localhost:11434) or the prompt "
"should be rewritten to not assume local access. "
"Do NOT attempt localhost connections — report the limitation.]\n\n"
)
return warning + prompt
class TestDetectLocalServiceRefs:
def test_localhost_port(self):
refs = _detect_local_service_refs("Check http://localhost:11434/health")
assert any("localhost:11434" in r for r in refs)
def test_127_port(self):
refs = _detect_local_service_refs("curl http://127.0.0.1:8080/api")
assert any("127.0.0.1:8080" in r for r in refs)
def test_check_ollama(self):
refs = _detect_local_service_refs("Check Ollama is responding on this host")
assert len(refs) > 0
assert any('localhost:\d+' in ref for ref in refs)
def test_127_0_0_1_with_port(self):
"""Test detection of 127.0.0.1 with port."""
prompt = "Connect to http://127.0.0.1:8080/api"
refs = _detect_local_service_refs(prompt)
def test_ollama_responding(self):
refs = _detect_local_service_refs("Verify that Ollama responding to requests")
assert len(refs) > 0
assert any('127\.0\.0\.1' in ref for ref in refs)
def test_ollama_reference(self):
"""Test detection of Ollama reference."""
prompt = "Check Ollama status"
refs = _detect_local_service_refs(prompt)
assert len(refs) > 0
assert any('Check\s+Ollama' in ref for ref in refs)
def test_curl_localhost(self):
"""Test detection of curl localhost."""
prompt = "Run curl localhost:3000 to test the server"
refs = _detect_local_service_refs(prompt)
refs = _detect_local_service_refs("curl localhost:11434/api/tags")
assert any("localhost:11434" in r for r in refs)
def test_ping_localhost(self):
refs = _detect_local_service_refs("ping localhost to check connectivity")
assert len(refs) > 0
assert any('curl\s+localhost' in ref for ref in refs)
def test_no_local_refs(self):
"""Test no detection when no local references."""
prompt = "Check the weather in New York"
refs = _detect_local_service_refs(prompt)
def test_no_match_normal(self):
refs = _detect_local_service_refs("Check the weather in New York")
assert len(refs) == 0
def test_no_match_forge(self):
refs = _detect_local_service_refs("Check forge.alexanderwhitestone.com for issues")
assert len(refs) == 0
def test_multiple_refs(self):
"""Test detection of multiple local references."""
prompt = "Check localhost:3000 and also Ollama on 127.0.0.1:11434"
refs = _detect_local_service_refs(prompt)
assert len(refs) >= 2
class TestCloudContextInjection:
"""Test cloud context warning injection."""
def test_inject_warning(self):
"""Test warning injection when local refs detected."""
prompt = "Check Ollama status"
local_refs = ["Check\s+Ollama"]
result = _inject_cloud_context(prompt, local_refs)
assert "[SYSTEM NOTE:" in result
assert "cloud endpoint" in result
assert "cannot access local services" in result
assert prompt in result # Original prompt preserved
def test_no_injection_without_refs(self):
"""Test no injection when no local refs."""
prompt = "Check the weather"
local_refs = []
result = _inject_cloud_context(prompt, local_refs)
class TestInjectCloudContext:
def test_injects_on_cloud_with_local_refs(self):
prompt = "Check Ollama is responding at localhost:11434"
result = _inject_cloud_context(prompt, "https://inference-api.nousresearch.com/v1")
assert "SYSTEM NOTE" in result
assert "cannot reach localhost" in result
assert "Check Ollama" in result
def test_no_inject_on_local_endpoint(self):
prompt = "Check Ollama is responding at localhost:11434"
result = _inject_cloud_context(prompt, "http://localhost:11434/v1")
assert "SYSTEM NOTE" not in result
assert result == prompt
assert "[SYSTEM NOTE:" not in result
def test_preserves_original_prompt(self):
"""Test that original prompt is preserved."""
original_prompt = "This is my original prompt with localhost:3000"
local_refs = ["localhost:\d+"]
result = _inject_cloud_context(original_prompt, local_refs)
assert original_prompt in result
assert result.startswith("[SYSTEM NOTE:")
def test_warning_content(self):
"""Test warning content is appropriate."""
prompt = "Test prompt"
local_refs = ["test"]
result = _inject_cloud_context(prompt, local_refs)
assert "report this limitation to the user" in result
assert "instead of attempting to connect" in result
def test_no_inject_without_local_refs(self):
prompt = "Check the forge for open issues"
result = _inject_cloud_context(prompt, "https://openrouter.ai/api/v1")
assert "SYSTEM NOTE" not in result
class TestPatternMatching:
"""Test individual pattern matching."""
def test_common_ports(self):
"""Test detection of common development ports."""
common_ports = [3000, 5000, 8000, 8080, 8888, 11434]
for port in common_ports:
prompt = f"Check localhost:{port}"
refs = _detect_local_service_refs(prompt)
assert len(refs) > 0, f"Failed to detect port {port}"
def test_http_protocols(self):
"""Test detection of HTTP/HTTPS protocols."""
protocols = ["http://localhost", "https://localhost",
"http://127.0.0.1", "https://127.0.0.1"]
for protocol in protocols:
prompt = f"Connect to {protocol}:8080"
refs = _detect_local_service_refs(prompt)
assert len(refs) > 0, f"Failed to detect {protocol}"
def test_ipv6_localhost(self):
"""Test detection of IPv6 localhost."""
prompt = "Connect to [::1]:8080"
refs = _detect_local_service_refs(prompt)
assert len(refs) > 0
assert any('\[::1\]' in ref for ref in refs)
def test_injects_on_empty_url_with_refs(self):
prompt = "Check Ollama is responding"
result = _inject_cloud_context(prompt, "")
assert "SYSTEM NOTE" in result
def test_preserves_full_prompt(self):
prompt = "You are the Health Monitor. Check Ollama. Verify forge."
result = _inject_cloud_context(prompt, "https://api.anthropic.com")
assert "You are the Health Monitor" in result
assert "Verify forge" in result
class TestEdgeCases:
"""Test edge cases and false positives."""
def test_case_insensitive(self):
"""Test case insensitive matching."""
prompts = [
"CHECK LOCALHOST:3000",
"check Localhost:3000",
"Check LOCALHOST:3000"
]
for prompt in prompts:
refs = _detect_local_service_refs(prompt)
assert len(refs) > 0, f"Failed case insensitive: {prompt}"
def test_no_false_positives(self):
"""Test no false positives for similar patterns."""
safe_prompts = [
"Check the localhost documentation",
"Read about 127.0.0.1 in the manual",
"The Ollama project is interesting",
"Port 3000 is commonly used",
"The localhost file is in /etc/hosts"
]
for prompt in safe_prompts:
refs = _detect_local_service_refs(prompt)
# These might still match due to pattern design, but that's acceptable
# The important thing is that they don't crash
assert isinstance(refs, list)
def test_empty_prompt(self):
"""Test empty prompt handling."""
refs = _detect_local_service_refs("")
assert refs == []
def test_none_handling(self):
"""Test None prompt handling."""
# The function should handle None gracefully
try:
refs = _detect_local_service_refs(None)
assert refs == []
except Exception as e:
# If it raises an exception, that's also acceptable
assert isinstance(e, (TypeError, AttributeError))
def test_includes_provider_url(self):
prompt = "curl localhost:11434/api/tags"
result = _inject_cloud_context(prompt, "https://openrouter.ai/api/v1")
assert "openrouter.ai" in result
if __name__ == "__main__":
pytest.main([__file__])
def test_rfc1918_treated_as_local(self):
prompt = "curl localhost:11434/api/tags"
result = _inject_cloud_context(prompt, "http://192.168.1.100:11434/v1")
assert result == prompt # No injection — RFC-1918 is local