Compare commits

..

1 Commits

Author SHA1 Message Date
Alexander Whitestone
3a9b172a1d fix: set legacy skill field from skills list in normalize_job
Some checks failed
Forge CI / smoke-and-build (pull_request) Failing after 19s
deploy-crons normalize_job() was normalizing model/provider fields
but ignoring skill/skills. Jobs with a `skills` list but no `skill`
field would be stored without the legacy field set, bypassing the
normalization that _apply_skill_fields() in cron/jobs.py provides.

Now normalize_job() deduplicates and sets both `skills` (list) and
`skill` (first element) using the same logic as _apply_skill_fields().

Fixes #579
2026-04-14 07:52:58 -04:00
3 changed files with 44 additions and 167 deletions

View File

@@ -12,7 +12,6 @@ import asyncio
import concurrent.futures
import json
import logging
import re
import os
import subprocess
import sys
@@ -545,55 +544,6 @@ def _run_job_script(script_path: str) -> tuple[bool, str]:
except Exception as exc:
return False, f"Script execution failed: {exc}"
# ---------------------------------------------------------------------------
# Cloud-context warning for local-service references (#378, #456)
# ---------------------------------------------------------------------------
_LOCAL_SERVICE_PATTERNS = [
re.compile(r'localhost:\d+', re.IGNORECASE),
re.compile(r'127\.0\.0\.1:\d+'),
re.compile(r'check\s+ollama', re.IGNORECASE),
re.compile(r'ollama\s+(is\s+)?respond', re.IGNORECASE),
re.compile(r'curl\s+localhost', re.IGNORECASE),
re.compile(r'curl\s+127\.', re.IGNORECASE),
re.compile(r'curl\s+local', re.IGNORECASE),
re.compile(r'ping\s+localhost', re.IGNORECASE),
re.compile(r'poll(ing)?\s+local', re.IGNORECASE),
re.compile(r'check\s+service\s+respond', re.IGNORECASE),
re.compile(r'11434'), # Ollama default port
re.compile(r'11435'), # common alt Ollama port
]
def _detect_local_service_refs(prompt: str) -> list[str]:
"""Return list of local-service reference descriptions found in prompt."""
refs = []
for pat in _LOCAL_SERVICE_PATTERNS:
m = pat.search(prompt)
if m:
refs.append(m.group(0))
return refs
def _inject_cloud_context(prompt: str, refs: list[str], provider: str) -> str:
"""Prepend a SYSTEM NOTE so the agent knows it cannot reach localhost."""
refs_str = ", ".join(f'"{r}"' for r in refs)
warning = (
"[SYSTEM NOTE — cloud endpoint]
"
f"You are running on a cloud inference endpoint ({provider}). "
f"Your prompt references local services: {refs_str}. "
"You CANNOT reach localhost or any local network address from this endpoint. "
"Do NOT attempt curl, ping, SSH, or any network calls to localhost. "
"Instead, report to the user that this job requires a local model endpoint "
"to check local services, and suggest they re-run with a local provider.
"
)
return warning + prompt
def _build_job_prompt(job: dict) -> str:
"""Build the effective prompt for a cron job, optionally loading one or more skills first."""
@@ -867,18 +817,6 @@ def run_job(job: dict) -> tuple[bool, str, str, Optional[str]]:
job_name,
)
# Inject cloud-context warning when prompt references local services (#378)
if _is_cloud:
_local_refs = _detect_local_service_refs(prompt)
if _local_refs:
_provider_name = turn_route["runtime"].get("provider", "cloud")
prompt = _inject_cloud_context(prompt, _local_refs, _provider_name)
logger.info(
"Job '%s': injected cloud-context warning for local refs: %s",
job_name,
_local_refs,
)
_agent_kwargs = _safe_agent_kwargs({
"model": turn_route["model"],
"api_key": turn_route["runtime"].get("api_key"),

View File

@@ -18,9 +18,9 @@ from typing import Any, Dict, Optional
def normalize_job(job: Dict[str, Any]) -> Dict[str, Any]:
"""
Normalize a job dict to ensure consistent model field types.
Normalize a job dict to ensure consistent model field types and aligned skill fields.
Before normalization:
Model normalization:
- If model AND provider: model = raw string, provider = raw string (inconsistent)
- If only model: model = raw string
- If only provider: provider = raw string at top level
@@ -30,37 +30,61 @@ def normalize_job(job: Dict[str, Any]) -> Dict[str, Any]:
- If provider exists: model = {"provider": "yyy"}
- If both exist: model = {"model": "xxx", "provider": "yyy"}
- If neither: model = None
Skill normalization:
- Aligns legacy `skill` (single string) with `skills` (list), setting skill = skills[0]
"""
job = dict(job) # Create a copy to avoid modifying the original
# --- skill / skills normalization ---
raw_skill = job.get("skill")
raw_skills = job.get("skills")
if raw_skills is None:
skill_items = [raw_skill] if raw_skill else []
elif isinstance(raw_skills, str):
skill_items = [raw_skills]
else:
skill_items = list(raw_skills)
normalized_skills: list = []
for item in skill_items:
text = str(item or "").strip()
if text and text not in normalized_skills:
normalized_skills.append(text)
job["skills"] = normalized_skills
job["skill"] = normalized_skills[0] if normalized_skills else None
# --- model / provider normalization ---
model = job.get("model")
provider = job.get("provider")
# Skip if already normalized (model is a dict)
if isinstance(model, dict):
return job
# Build normalized model dict
model_dict = {}
if model is not None and isinstance(model, str):
model_dict["model"] = model.strip()
if provider is not None and isinstance(provider, str):
model_dict["provider"] = provider.strip()
# Set model field
if model_dict:
job["model"] = model_dict
else:
job["model"] = None
# Remove top-level provider field if it was moved into model dict
if provider is not None and "provider" in model_dict:
# Keep provider field for backward compatibility but mark it as deprecated
# This allows existing code that reads job["provider"] to continue working
pass
return job
@@ -90,20 +114,26 @@ def normalize_jobs_file(jobs_file: Path, dry_run: bool = False) -> int:
for i, job in enumerate(jobs):
original_model = job.get("model")
original_provider = job.get("provider")
original_skill = job.get("skill")
original_skills = job.get("skills")
normalized_job = normalize_job(job)
# Check if anything changed
if (normalized_job.get("model") != original_model or
normalized_job.get("provider") != original_provider):
normalized_job.get("provider") != original_provider or
normalized_job.get("skill") != original_skill or
normalized_job.get("skills") != original_skills):
jobs[i] = normalized_job
modified_count += 1
job_id = job.get("id", "?")
job_name = job.get("name", "(unnamed)")
print(f"Normalized job {job_id} ({job_name}):")
print(f" model: {original_model!r} -> {normalized_job.get('model')!r}")
print(f" provider: {original_provider!r} -> {normalized_job.get('provider')!r}")
print(f" skill: {original_skill!r} -> {normalized_job.get('skill')!r}")
print(f" skills: {original_skills!r} -> {normalized_job.get('skills')!r}")
if modified_count == 0:
print("All jobs already have consistent model field types.")

View File

@@ -1,91 +0,0 @@
"""Tests for cloud-context warning injection (#378, #456)."""
import pytest
import sys
from pathlib import Path
sys.path.insert(0, str(Path(__file__).parent.parent))
from cron.scheduler import (
_LOCAL_SERVICE_PATTERNS,
_detect_local_service_refs,
_inject_cloud_context,
)
class TestDetectLocalServiceRefs:
"""Pattern detection for local service references in prompts."""
def test_localhost_with_port(self):
refs = _detect_local_service_refs("Check localhost:11434 is up")
assert len(refs) >= 1
assert any("11434" in r for r in refs)
def test_127_with_port(self):
refs = _detect_local_service_refs("curl http://127.0.0.1:8080/health")
assert len(refs) >= 1
def test_check_ollama(self):
refs = _detect_local_service_refs("Check Ollama is responding")
assert len(refs) >= 1
def test_ollama_responding(self):
refs = _detect_local_service_refs("Verify Ollama responding on this machine")
assert len(refs) >= 1
def test_curl_localhost(self):
refs = _detect_local_service_refs("curl localhost and report status")
assert len(refs) >= 1
def test_ping_localhost(self):
refs = _detect_local_service_refs("ping localhost to check connectivity")
assert len(refs) >= 1
def test_no_false_positive_cloud(self):
refs = _detect_local_service_refs("Check the weather in Paris today")
assert len(refs) == 0
def test_no_false_positive_api(self):
refs = _detect_local_service_refs("Call the OpenRouter API endpoint")
assert len(refs) == 0
def test_multiple_refs(self):
refs = _detect_local_service_refs("curl localhost:11434 then ping localhost")
assert len(refs) >= 2
class TestInjectCloudContext:
"""Cloud-context warning injection."""
def test_prepends_warning(self):
prompt = "Check Ollama is responding"
result = _inject_cloud_context(prompt, ["Check Ollama"], "nous")
assert result.startswith("[SYSTEM NOTE")
assert "nous" in result
assert prompt in result
def test_preserves_original_prompt(self):
prompt = "Check Ollama at localhost:11434"
result = _inject_cloud_context(prompt, ["localhost:11434"], "openrouter")
assert prompt in result
def test_mentions_cannot_reach(self):
prompt = "curl localhost"
result = _inject_cloud_context(prompt, ["curl localhost"], "nous")
assert "CANNOT reach" in result or "cannot reach" in result
def test_suggests_local_provider(self):
prompt = "Check Ollama"
result = _inject_cloud_context(prompt, ["Check Ollama"], "nous")
assert "local" in result.lower()
class TestCloudBypassLocal:
"""Local endpoints should not trigger injection."""
def test_local_endpoint_skips(self):
# The caller checks _is_cloud before calling _detect_local_service_refs
# so this is tested at integration level. Here we verify detection
# still finds refs (the bypass is the caller\'s responsibility).
refs = _detect_local_service_refs("Check Ollama at localhost:11434")
assert len(refs) > 0 # Detection works, caller decides whether to inject