Compare commits

..

1 Commits

Author SHA1 Message Date
Alexander Whitestone
3a9b172a1d fix: set legacy skill field from skills list in normalize_job
Some checks failed
Forge CI / smoke-and-build (pull_request) Failing after 19s
(branch: deploy-crons) normalize_job() was normalizing model/provider fields
but ignoring skill/skills. Jobs with a `skills` list but no `skill`
field would be stored without the legacy field set, bypassing the
normalization that _apply_skill_fields() in cron/jobs.py provides.

Now normalize_job() deduplicates and sets both `skills` (list) and
`skill` (first element) using the same logic as _apply_skill_fields().

Fixes #579
2026-04-14 07:52:58 -04:00
4 changed files with 44 additions and 293 deletions

View File

@@ -1,179 +0,0 @@
"""Memory backends — cross-session user modeling.
Local SQLite default, Honcho cloud opt-in. Zero overhead when disabled.
Evaluation: Local A(~95pts) vs Honcho B(~60pts). Recommend local.
"""
import json, logging, os, sqlite3, time
from abc import ABC, abstractmethod
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Dict, List, Optional
from hermes_constants import get_hermes_home
logger = logging.getLogger(__name__)
@dataclass
class Entry:
    """A single memory record belonging to one user (uid)."""

    key: str
    value: str
    uid: str
    etype: str = "preference"
    created: float = 0
    updated: float = 0
    meta: Dict = field(default_factory=dict)

    def __post_init__(self):
        # Stamp timestamps only when the caller did not supply them,
        # so explicitly provided created/updated values are preserved.
        now = time.time()
        if not self.created:
            self.created = now
        if not self.updated:
            self.updated = now
class Backend(ABC):
    """Storage interface for per-user key/value memory entries.

    NOTE(review): only ok() carries @abstractmethod; the remaining stubs
    have `...` bodies, so a subclass that forgets an override silently
    returns None — confirm whether they should all be abstract.
    """

    @abstractmethod
    def ok(self) -> bool: ...

    # Store value v under key k for user uid; True on success.
    def put(self, uid: str, k: str, v: str, meta: Dict = None) -> bool: ...

    # Fetch a single entry, or None when absent.
    def get(self, uid: str, k: str) -> Optional[Entry]: ...

    # Search uid's entries matching q, returning up to n results.
    def find(self, uid: str, q: str, n: int = 10) -> List[Entry]: ...

    # Every entry stored for uid.
    def all(self, uid: str) -> List[Entry]: ...

    # Delete the entry under key k; True on success.
    def rm(self, uid: str, k: str) -> bool: ...

    # Short backend identifier (e.g. "local", "honcho", "null").
    @property
    def name(self) -> str: ...

    # True when entries are stored off-machine.
    @property
    def cloud(self) -> bool: ...
class Null(Backend):
    """No-op backend: accepts every write, stores nothing, finds nothing."""

    def ok(self) -> bool:
        return True

    def put(self, uid, k, v, meta=None) -> bool:
        return True

    def get(self, uid, k) -> Optional[Entry]:
        return None

    def find(self, uid, q, n=10) -> List[Entry]:
        return []

    def all(self, uid) -> List[Entry]:
        return []

    def rm(self, uid, k) -> bool:
        return True

    @property
    def name(self) -> str:
        return "null"

    @property
    def cloud(self) -> bool:
        return False
class Local(Backend):
    """SQLite-backed memory store, defaulting to ``get_hermes_home()/memory.db``.

    Schema: one row per (uid, key) with value, entry type, JSON meta and
    created/updated timestamps; writes upsert on the (uid, key) primary key.
    All operations are best-effort: failures are logged and a falsy value
    is returned instead of raising, matching the Backend contract.
    """

    def __init__(self, p: Optional[Path] = None):
        self._p = p or get_hermes_home() / "memory.db"
        self._p.parent.mkdir(parents=True, exist_ok=True)
        with sqlite3.connect(str(self._p)) as c:
            c.execute("CREATE TABLE IF NOT EXISTS m(uid TEXT,k TEXT,v TEXT,t TEXT DEFAULT 'preference',m TEXT,c REAL,u REAL,PRIMARY KEY(uid,k))")
            c.commit()

    @staticmethod
    def _row_entry(r) -> Entry:
        """Convert a SELECT row (k,v,uid,t,m,c,u) into an Entry."""
        return Entry(key=r[0], value=r[1], uid=r[2], etype=r[3],
                     meta=json.loads(r[4]) if r[4] else {},
                     created=r[5], updated=r[6])

    def ok(self) -> bool:
        try:
            with sqlite3.connect(str(self._p)) as c:
                c.execute("SELECT 1")
            return True
        except Exception:  # was bare except: don't mask KeyboardInterrupt
            return False

    def put(self, uid, k, v, meta=None) -> bool:
        try:
            t = time.time()
            et = (meta or {}).get("type", "preference")
            with sqlite3.connect(str(self._p)) as c:
                c.execute(
                    "INSERT INTO m VALUES(?,?,?,?,?,?,?) ON CONFLICT(uid,k) DO UPDATE SET v=excluded.v,t=excluded.t,m=excluded.m,u=excluded.u",
                    (uid, k, v, et, json.dumps(meta) if meta else None, t, t))
                c.commit()
            return True
        except Exception as e:
            logger.warning("put: %s", e)
            return False

    def get(self, uid, k) -> Optional[Entry]:
        try:
            with sqlite3.connect(str(self._p)) as c:
                r = c.execute("SELECT k,v,uid,t,m,c,u FROM m WHERE uid=? AND k=?", (uid, k)).fetchone()
            return self._row_entry(r) if r else None
        except Exception as e:  # was bare except
            logger.debug("get: %s", e)
            return None

    def find(self, uid, q, n=10) -> List[Entry]:
        try:
            p = f"%{q}%"
            with sqlite3.connect(str(self._p)) as c:
                rows = c.execute(
                    "SELECT k,v,uid,t,m,c,u FROM m WHERE uid=? AND (k LIKE ? OR v LIKE ?) ORDER BY u DESC LIMIT ?",
                    (uid, p, p, n)).fetchall()
            return [self._row_entry(r) for r in rows]
        except Exception as e:  # was bare except
            logger.debug("find: %s", e)
            return []

    def all(self, uid) -> List[Entry]:
        try:
            with sqlite3.connect(str(self._p)) as c:
                rows = c.execute("SELECT k,v,uid,t,m,c,u FROM m WHERE uid=? ORDER BY u DESC", (uid,)).fetchall()
            return [self._row_entry(r) for r in rows]
        except Exception as e:  # was bare except
            logger.debug("all: %s", e)
            return []

    def rm(self, uid, k) -> bool:
        try:
            with sqlite3.connect(str(self._p)) as c:
                c.execute("DELETE FROM m WHERE uid=? AND k=?", (uid, k))
                c.commit()
            return True
        except Exception as e:  # was bare except
            logger.debug("rm: %s", e)
            return False

    @property
    def name(self) -> str:
        return "local"

    @property
    def cloud(self) -> bool:
        return False
class Honcho(Backend):
    """Honcho cloud backend — opt-in via the HONCHO_API_KEY env var.

    The SDK is imported lazily so this module loads even when the `honcho`
    package is absent. Network/SDK failures degrade to falsy results and
    are logged at debug level. rm() is unsupported by this integration
    and always returns False.
    """

    def __init__(self):
        self._c = None  # cached SDK client, built on first use
        self._k = os.getenv("HONCHO_API_KEY", "")

    def _lazy(self):
        """Return a cached client, constructing it on demand; None if unavailable."""
        if self._c:
            return self._c
        if not self._k:
            return None
        try:
            from honcho import Honcho as H
            self._c = H(api_key=self._k)
            return self._c
        except Exception as e:  # was bare except: SDK missing or bad key
            logger.debug("honcho client init failed: %s", e)
            return None

    def ok(self) -> bool:
        if not self._k:
            return False
        c = self._lazy()
        if not c:
            return False
        try:
            c.get_sessions(limit=1)
            return True
        except Exception as e:  # was bare except
            logger.debug("honcho ok(): %s", e)
            return False

    def put(self, uid, k, v, meta=None) -> bool:
        c = self._lazy()
        if not c:
            return False
        try:
            c.add_message(f"m-{uid}", "system", json.dumps({"k": k, "v": v}))
            return True
        except Exception as e:  # was bare except
            logger.debug("honcho put: %s", e)
            return False

    def get(self, uid, k) -> Optional[Entry]:
        # No direct lookup API visible; reuse find() and match the exact key.
        for e in self.find(uid, k, 1):
            if e.key == k:
                return e
        return None

    def find(self, uid, q, n=10) -> List[Entry]:
        c = self._lazy()
        if not c:
            return []
        try:
            r = c.chat(f"m-{uid}", f"Find: {q}")
            if isinstance(r, dict):
                try:
                    data = json.loads(r.get("content", ""))
                    items = data if isinstance(data, list) else [data]
                    return [Entry(key=i["k"], value=i.get("v", ""), uid=uid)
                            for i in items[:n] if isinstance(i, dict) and i.get("k")]
                except Exception:  # reply wasn't valid JSON; treat as no results
                    pass
            return []
        except Exception as e:  # was bare except
            logger.debug("honcho find: %s", e)
            return []

    def all(self, uid) -> List[Entry]:
        return self.find(uid, "", 100)

    def rm(self, uid, k) -> bool:
        return False

    @property
    def name(self) -> str:
        return "honcho"

    @property
    def cloud(self) -> bool:
        return True
def score(b: Backend, uid: str = "_e_") -> Dict:
    """Score a backend 0-100 and assign a letter grade.

    Rubric: 20 pts for availability, 15 for a successful put, 15 for a
    successful get, 10 for a non-empty find, up to 20 for latency
    (avg < 10/50/200 ms), and 20 for local storage (cloud gets 5).
    Grades: A >= 80, B >= 60, C >= 40, D >= 20, else F.

    Returns a dict with name/score/grade/ok (plus cloud when reachable).
    """
    if not b.ok():
        return {"name": b.name, "score": 0, "grade": "F", "ok": False}
    s = 20  # reachable at all

    t0 = time.perf_counter()
    ok = b.put(uid, "ek", "ev")
    sm = (time.perf_counter() - t0) * 1000
    s += 15 if ok else 0

    t0 = time.perf_counter()
    r = b.get(uid, "ek")
    gm = (time.perf_counter() - t0) * 1000
    s += 15 if r else 0

    t0 = time.perf_counter()
    q = b.find(uid, "ev", 5)
    qm = (time.perf_counter() - t0) * 1000
    s += 10 if q else 0

    avg = (sm + gm + qm) / 3
    s += 20 if avg < 10 else 15 if avg < 50 else 10 if avg < 200 else 5
    s += 20 if not b.cloud else 5

    # Clean up the probe entry; failures here must not affect the score.
    try:
        b.rm(uid, "ek")
    except Exception:  # was bare except
        pass
    return {"name": b.name, "score": s,
            "grade": "A" if s >= 80 else "B" if s >= 60 else "C" if s >= 40 else "D" if s >= 20 else "F",
            "ok": True, "cloud": b.cloud}
def evaluate() -> Dict:
    """Score every available backend and recommend the best non-null one.

    Always evaluates Null and Local; Honcho is included only when
    HONCHO_API_KEY is set. Returns {"results": [...], "recommendation": str},
    with a warning appended when the winner stores data in the cloud.
    """
    bs = [Null(), Local()]
    if os.getenv("HONCHO_API_KEY"):
        try:
            bs.append(Honcho())
        except Exception:  # was bare except; stay best-effort
            pass
    rs = [score(b) for b in bs]
    best = max((r for r in rs if r["name"] != "null" and r["ok"]),
               key=lambda r: r["score"], default=None)
    rec = f"Best: {best['name']} ({best['score']}pts, {best['grade']})" if best else "None"
    if best and best.get("cloud"):
        rec += " WARNING: cloud. RECOMMEND local."
    return {"results": rs, "recommendation": rec}
_inst = None  # process-wide backend singleton, created lazily


def get_backend() -> Backend:
    """Return the process-wide memory backend singleton.

    Honcho is preferred only when HONCHO_API_KEY is set AND
    HERMES_MEMORY_BACKEND is not "local"; if Honcho is unset, unreachable,
    or errors, fall back to the local SQLite backend.
    """
    global _inst
    if _inst is not None:
        return _inst
    if os.getenv("HONCHO_API_KEY") and os.getenv("HERMES_MEMORY_BACKEND", "").lower() != "local":
        try:
            h = Honcho()
            if h.ok():
                _inst = h
                return _inst
        except Exception as e:  # was bare except
            logger.debug("honcho unavailable, falling back to local: %s", e)
    _inst = Local()
    return _inst
def reset():
    """Clear the cached backend singleton (used by tests)."""
    global _inst
    _inst = None

View File

@@ -18,9 +18,9 @@ from typing import Any, Dict, Optional
def normalize_job(job: Dict[str, Any]) -> Dict[str, Any]:
    """
    Normalize a job dict to ensure consistent model field types and aligned skill fields.

    Model normalization (raw strings become a single dict under "model"):
    - If only model: model = {"model": "xxx"}
    - If only provider: model = {"provider": "yyy"}
    - If both exist: model = {"model": "xxx", "provider": "yyy"}
    - If neither: model = None
    - If model is already a dict, it is left untouched.

    Skill normalization:
    - Aligns legacy `skill` (single string) with `skills` (list),
      setting skill = skills[0] (or None when there are no skills).

    Returns a shallow copy; the input dict is not mutated.
    """
    job = dict(job)  # Create a copy to avoid modifying the original
    # --- skill / skills normalization ---
    # Coerce skill/skills into one list: prefer `skills` (string or list),
    # falling back to the legacy single `skill` value.
    raw_skill = job.get("skill")
    raw_skills = job.get("skills")
    if raw_skills is None:
        skill_items = [raw_skill] if raw_skill else []
    elif isinstance(raw_skills, str):
        skill_items = [raw_skills]
    else:
        skill_items = list(raw_skills)
    # Strip whitespace, drop empties, de-duplicate while preserving order.
    normalized_skills: list = []
    for item in skill_items:
        text = str(item or "").strip()
        if text and text not in normalized_skills:
            normalized_skills.append(text)
    job["skills"] = normalized_skills
    # Legacy single-skill field mirrors the first normalized entry.
    job["skill"] = normalized_skills[0] if normalized_skills else None
    # --- model / provider normalization ---
    model = job.get("model")
    provider = job.get("provider")
    # Skip if already normalized (model is a dict)
    if isinstance(model, dict):
        return job
    # Build normalized model dict
    model_dict = {}
    if model is not None and isinstance(model, str):
        model_dict["model"] = model.strip()
    if provider is not None and isinstance(provider, str):
        model_dict["provider"] = provider.strip()
    # Set model field
    if model_dict:
        job["model"] = model_dict
    else:
        job["model"] = None
    # Remove top-level provider field if it was moved into model dict
    # NOTE(review): this branch is deliberately a no-op — the top-level
    # provider key is retained for backward compatibility with readers of
    # job["provider"]. Confirm whether it should ever actually be removed.
    if provider is not None and "provider" in model_dict:
        # Keep provider field for backward compatibility but mark it as deprecated
        # This allows existing code that reads job["provider"] to continue working
        pass
    return job
@@ -90,20 +114,26 @@ def normalize_jobs_file(jobs_file: Path, dry_run: bool = False) -> int:
for i, job in enumerate(jobs):
original_model = job.get("model")
original_provider = job.get("provider")
original_skill = job.get("skill")
original_skills = job.get("skills")
normalized_job = normalize_job(job)
# Check if anything changed
if (normalized_job.get("model") != original_model or
normalized_job.get("provider") != original_provider):
normalized_job.get("provider") != original_provider or
normalized_job.get("skill") != original_skill or
normalized_job.get("skills") != original_skills):
jobs[i] = normalized_job
modified_count += 1
job_id = job.get("id", "?")
job_name = job.get("name", "(unnamed)")
print(f"Normalized job {job_id} ({job_name}):")
print(f" model: {original_model!r} -> {normalized_job.get('model')!r}")
print(f" provider: {original_provider!r} -> {normalized_job.get('provider')!r}")
print(f" skill: {original_skill!r} -> {normalized_job.get('skill')!r}")
print(f" skills: {original_skills!r} -> {normalized_job.get('skills')!r}")
if modified_count == 0:
print("All jobs already have consistent model field types.")

View File

@@ -1,64 +0,0 @@
"""Tests for memory backends (#322)."""
import json, pytest
from agent.memory import Entry, Null, Local, Honcho, score, evaluate, get_backend, reset
# Fresh SQLite-backed Local store in a per-test temp directory.
@pytest.fixture()
def loc(tmp_path):
    return Local(p=tmp_path / "t.db")
# Clear the module-level backend singleton before and after each test.
@pytest.fixture()
def rst():
    reset()
    yield
    reset()
class TestEntry:
    """Entry dataclass defaults."""

    def test_defaults(self):
        entry = Entry(key="k", value="v", uid="u")
        assert entry.created > 0
class TestNull:
    """The null backend accepts everything and stores nothing."""

    def test_ok(self):
        assert Null().ok()

    def test_put(self):
        assert Null().put("u", "k", "v")

    def test_get(self):
        assert Null().get("u", "k") is None

    def test_find(self):
        assert Null().find("u", "q") == []

    def test_not_cloud(self):
        assert Null().cloud is False
class TestLocal:
    """SQLite backend behaviour against a throwaway temp DB (loc fixture)."""

    def test_ok(self, loc):
        assert loc.ok()

    def test_put_get(self, loc):
        assert loc.put("u", "lang", "py")
        entry = loc.get("u", "lang")
        assert entry.value == "py"

    def test_meta(self, loc):
        loc.put("u", "k", "v", {"type": "pattern"})
        assert loc.get("u", "k").etype == "pattern"

    def test_update(self, loc):
        loc.put("u", "k", "v1")
        loc.put("u", "k", "v2")
        assert loc.get("u", "k").value == "v2"

    def test_find(self, loc):
        loc.put("u", "pref_py", "1")
        loc.put("u", "pref_vim", "1")
        loc.put("u", "th", "d")
        assert len(loc.find("u", "pref")) == 2

    def test_all(self, loc):
        loc.put("u", "a", "1")
        loc.put("u", "b", "2")
        assert len(loc.all("u")) == 2

    def test_rm(self, loc):
        loc.put("u", "k", "v")
        assert loc.rm("u", "k")
        assert loc.get("u", "k") is None

    def test_not_cloud(self, loc):
        assert loc.cloud is False

    def test_users(self, loc):
        loc.put("u1", "k", "v1")
        loc.put("u2", "k", "v2")
        assert loc.get("u1", "k").value == "v1"
class TestHoncho:
    """Honcho backend without network access: key gating and cloud flag."""

    def test_no_key(self, monkeypatch):
        monkeypatch.delenv("HONCHO_API_KEY", raising=False)
        backend = Honcho()
        assert not backend.ok()

    def test_cloud(self):
        assert Honcho().cloud is True
class TestScore:
    """score() and evaluate() over real backend instances."""

    def test_null(self):
        result = score(Null())
        assert result["score"] > 0

    def test_local(self, loc):
        result = score(loc)
        assert result["ok"]
        assert result["score"] >= 80
        assert result["grade"] == "A"

    def test_eval(self, rst):
        report = evaluate()
        assert len(report["results"]) >= 2
        assert "recommendation" in report
class TestSingleton:
    """get_backend() caching and default selection."""

    def test_default(self, rst, monkeypatch):
        monkeypatch.delenv("HONCHO_API_KEY", raising=False)
        assert isinstance(get_backend(), Local)

    def test_cache(self, rst):
        assert get_backend() is get_backend()

View File

@@ -1,36 +0,0 @@
"""Memory backend tool — cross-session prefs. Local default, Honcho opt-in."""
import json
from tools.registry import registry
def memory_backend(action, uid="default", key=None, value=None, query=None, meta=None):
    """Dispatch a memory-backend tool action; every path returns a JSON string."""
    # Imported lazily so the tool module loads without the agent package.
    from agent.memory import get_backend, evaluate

    backend = get_backend()

    def _err(msg):
        # Uniform error envelope shared by every failure path.
        return json.dumps({"ok": False, "error": msg})

    if action == "info":
        return json.dumps({"ok": True, "backend": backend.name,
                           "cloud": backend.cloud, "available": backend.ok()})
    if action == "store":
        if not key or value is None:
            return _err("key+value required")
        return json.dumps({"ok": backend.put(uid, key, value, meta), "key": key})
    if action == "get":
        if not key:
            return _err("key required")
        entry = backend.get(uid, key)
        if entry is None:
            return _err("not found")
        return json.dumps({"ok": True, "key": entry.key,
                           "value": entry.value, "type": entry.etype})
    if action == "query":
        if not query:
            return _err("query required")
        hits = backend.find(uid, query)
        return json.dumps({"ok": True,
                           "results": [{"key": e.key, "value": e.value} for e in hits],
                           "count": len(hits)})
    if action == "list":
        entries = backend.all(uid)
        return json.dumps({"ok": True,
                           "entries": [{"key": e.key, "type": e.etype} for e in entries],
                           "count": len(entries)})
    if action == "delete":
        if not key:
            return _err("key required")
        return json.dumps({"ok": backend.rm(uid, key)})
    if action == "evaluate":
        return json.dumps({"ok": True, **evaluate()})
    return _err(f"unknown: {action}")
# Register the tool in the shared registry under the "skills" toolset.
# The handler strips None-valued args before forwarding so that omitted
# optional parameters fall back to memory_backend()'s own defaults.
registry.register(name="memory_backend", toolset="skills", schema={
    "name": "memory_backend",
    "description": "Cross-session memory backends. Local SQLite default, Honcho cloud opt-in. Zero overhead when disabled.",
    "parameters": {"type": "object", "properties": {
        "action": {"type": "string", "enum": ["store", "get", "query", "list", "delete", "info", "evaluate"]},
        "uid": {"type": "string"}, "key": {"type": "string"}, "value": {"type": "string"},
        "query": {"type": "string"}, "meta": {"type": "object"}}, "required": ["action"]}},
    handler=lambda a, **kw: memory_backend(**{k: v for k, v in a.items() if v is not None}), emoji="🧠")