Compare commits

..

2 Commits

Author SHA1 Message Date
Alexander Whitestone
599904d945 fix: log plugin memory provider fallback failure at debug level
All checks were successful
Lint / lint (pull_request) Successful in 9s
Address review feedback on PR #1002: replace silent `except Exception: pass`
with `logger.debug(...)` so plugin loading failures are visible in debug logs.

Refs #990

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-22 10:54:10 -04:00
Alexander Whitestone
df1bfe433a fix: add register_memory_provider to PluginContext — fixes #990
PluginContext was missing register_memory_provider(), causing any
user plugin (e.g. MemPalace) that followed the documented pattern
of calling ctx.register_memory_provider(provider) in its register()
function to fail at startup with:

  'PluginContext' object has no attribute 'register_memory_provider'

Changes:
- hermes_cli/plugins.py: Add register_memory_provider() to PluginContext,
  _plugin_memory_provider field to PluginManager, and module-level
  get_plugin_memory_provider() accessor function.
- run_agent.py: After failing to find a provider in plugins/memory/,
  fall back to checking get_plugin_memory_provider() — mirrors how
  context engine plugins are resolved.
- tests: Add TestRegisterMemoryProvider with four regression tests
  covering success, accessor, duplicate rejection, and type validation.

Fixes #990

Co-Authored-By: Claude Sonnet 4.6 <noreply@anthropic.com>
2026-04-22 10:54:10 -04:00
8 changed files with 165 additions and 274 deletions

View File

@@ -211,6 +211,43 @@ class PluginContext:
}
logger.debug("Plugin %s registered CLI command: %s", self.manifest.name, name)
# -- memory provider registration ----------------------------------------
def register_memory_provider(self, provider) -> None:
    """Accept a memory provider contributed by this plugin.

    The *provider* must be an instance of
    ``agent.memory_provider.MemoryProvider``; anything else is refused with
    a warning. At most one plugin-supplied provider is kept — a second
    registration attempt is likewise refused. The accepted instance is
    available through ``get_plugin_memory_provider()`` and is adopted by
    ``run_agent.py`` whenever ``memory.provider`` in *config.yaml* equals
    the provider's ``name`` property.
    """
    from agent.memory_provider import MemoryProvider

    # Type gate: reject anything that is not a real MemoryProvider subclass.
    if not isinstance(provider, MemoryProvider):
        logger.warning(
            "Plugin '%s' tried to register a memory provider that does not "
            "inherit from MemoryProvider. Ignoring.",
            self.manifest.name,
        )
        return

    # Single-slot policy: the first plugin to register wins.
    existing = self._manager._plugin_memory_provider
    if existing is not None:
        logger.warning(
            "Plugin '%s' tried to register a memory provider, but one is "
            "already registered by another plugin. Only one plugin-supplied "
            "memory provider is allowed at a time.",
            self.manifest.name,
        )
        return

    self._manager._plugin_memory_provider = provider
    logger.info(
        "Plugin '%s' registered memory provider: %s",
        self.manifest.name, provider.name,
    )
# -- context engine registration -----------------------------------------
def register_context_engine(self, engine) -> None:
@@ -323,6 +360,7 @@ class PluginManager:
self._plugin_tool_names: Set[str] = set()
self._cli_commands: Dict[str, dict] = {}
self._context_engine = None # Set by a plugin via register_context_engine()
self._plugin_memory_provider = None # Set by a plugin via register_memory_provider()
self._discovered: bool = False
self._cli_ref = None # Set by CLI after plugin discovery
# Plugin skill registry: qualified name → metadata dict.
@@ -699,6 +737,11 @@ def get_plugin_context_engine():
return get_plugin_manager()._context_engine
def get_plugin_memory_provider():
    """Return the memory provider a plugin registered, if any (else ``None``)."""
    manager = get_plugin_manager()
    return manager._plugin_memory_provider
def get_plugin_toolsets() -> List[tuple]:
"""Return plugin toolsets as ``(key, label, description)`` tuples.

View File

@@ -57,7 +57,7 @@ CONFIGURABLE_TOOLSETS = [
("moa", "🧠 Mixture of Agents", "mixture_of_agents"),
("tts", "🔊 Text-to-Speech", "text_to_speech"),
("skills", "📚 Skills", "list, view, manage"),
("todo", "📋 Task Planning", "todo, ultraplan"),
("todo", "📋 Task Planning", "todo"),
("memory", "💾 Memory", "persistent memory across sessions"),
("session_search", "🔎 Session Search", "search past conversations"),
("clarify", "❓ Clarifying Questions", "clarify"),

View File

@@ -1193,6 +1193,18 @@ class AIAgent:
from plugins.memory import load_memory_provider as _load_mem
self._memory_manager = _MemoryManager()
_mp = _load_mem(_mem_provider_name)
# Fall back to a user plugin that called register_memory_provider()
if _mp is None:
try:
from hermes_cli.plugins import get_plugin_memory_provider as _gpm
_candidate = _gpm()
if _candidate and _candidate.name == _mem_provider_name:
_mp = _candidate
except Exception as _gpm_err:
logger.debug(
"get_plugin_memory_provider() failed during fallback lookup: %s",
_gpm_err,
)
if _mp and _mp.is_available():
self._memory_manager.add_provider(_mp)
if self._memory_manager.providers:

View File

@@ -19,6 +19,7 @@ from hermes_cli.plugins import (
PluginManifest,
get_plugin_manager,
get_pre_tool_call_block_message,
get_plugin_memory_provider,
discover_plugins,
invoke_hook,
)
@@ -609,3 +610,105 @@ class TestPreLlmCallTargetRouting:
# in PluginContext (hermes_cli/plugins.py). The tests referenced _plugin_commands,
# commands_registered, get_plugin_command_handler, and GATEWAY_KNOWN_COMMANDS
# integration — all of which are unimplemented features.
# ── TestRegisterMemoryProvider ─────────────────────────────────────────────
class TestRegisterMemoryProvider:
    """Regression tests for PluginContext.register_memory_provider() — issue #990.

    The MemPalace plugin (and any user plugin following the developer guide)
    calls ``ctx.register_memory_provider(provider)`` inside ``register(ctx)``.
    Before the fix, PluginContext had no such method and the plugin failed to
    load with: 'PluginContext' object has no attribute 'register_memory_provider'.
    """

    def _make_memory_plugin(self, plugins_dir: "Path", name: str) -> None:
        """Write a minimal user plugin that registers a stub MemoryProvider."""
        # NOTE: no import of MemoryProvider here — the stub class lives inside
        # the generated plugin file, not in this helper. (An earlier revision
        # carried a dead `from agent.memory_provider import MemoryProvider`.)
        plugin_dir = plugins_dir / name
        plugin_dir.mkdir(parents=True, exist_ok=True)
        (plugin_dir / "plugin.yaml").write_text(
            f"name: {name}\nversion: 0.1.0\ndescription: Stub memory plugin\n"
        )
        # The register() body imports and calls register_memory_provider — this
        # is the exact pattern documented in memory-provider-plugin.md and used
        # by third-party plugins such as MemPalace.
        (plugin_dir / "__init__.py").write_text(
            "from agent.memory_provider import MemoryProvider\n"
            "\n"
            "class _StubProvider(MemoryProvider):\n"
            " @property\n"
            f" def name(self): return '{name}'\n"
            " def is_available(self): return True\n"
            " def initialize(self, session_id, **kw): pass\n"
            " def get_tool_schemas(self): return []\n"
            "\n"
            "def register(ctx):\n"
            " ctx.register_memory_provider(_StubProvider())\n"
        )

    def test_register_memory_provider_succeeds(self, tmp_path, monkeypatch):
        """A user plugin calling register_memory_provider() loads without error."""
        plugins_dir = tmp_path / "hermes_test" / "plugins"
        self._make_memory_plugin(plugins_dir, "mempalace")
        monkeypatch.setenv("HERMES_HOME", str(tmp_path / "hermes_test"))
        mgr = PluginManager()
        mgr.discover_and_load()
        assert "mempalace" in mgr._plugins
        # On failure, surface the plugin's recorded load error in the report.
        assert mgr._plugins["mempalace"].enabled, (
            mgr._plugins["mempalace"].error
        )

    def test_plugin_memory_provider_stored(self, tmp_path, monkeypatch):
        """The provider instance is accessible via get_plugin_memory_provider()."""
        import hermes_cli.plugins as plugins_mod
        plugins_dir = tmp_path / "hermes_test" / "plugins"
        self._make_memory_plugin(plugins_dir, "mempalace")
        monkeypatch.setenv("HERMES_HOME", str(tmp_path / "hermes_test"))
        mgr = PluginManager()
        # Swap the singleton so get_plugin_memory_provider() sees our manager
        monkeypatch.setattr(plugins_mod, "_plugin_manager", mgr)
        mgr.discover_and_load()
        provider = get_plugin_memory_provider()
        assert provider is not None
        assert provider.name == "mempalace"

    def test_second_registration_rejected(self, tmp_path, monkeypatch):
        """Only one plugin-registered memory provider is accepted."""
        plugins_dir = tmp_path / "hermes_test" / "plugins"
        self._make_memory_plugin(plugins_dir, "first_provider")
        self._make_memory_plugin(plugins_dir, "second_provider")
        monkeypatch.setenv("HERMES_HOME", str(tmp_path / "hermes_test"))
        mgr = PluginManager()
        mgr.discover_and_load()
        # The manager should hold exactly one provider; load order is not
        # guaranteed, so either plugin may have won the single slot.
        assert mgr._plugin_memory_provider is not None
        assert mgr._plugin_memory_provider.name in {"first_provider", "second_provider"}

    def test_non_provider_rejected(self, tmp_path, monkeypatch):
        """Passing a non-MemoryProvider object logs a warning and is ignored."""
        plugins_dir = tmp_path / "hermes_test" / "plugins"
        plugin_dir = plugins_dir / "bad_provider"
        plugin_dir.mkdir(parents=True, exist_ok=True)
        (plugin_dir / "plugin.yaml").write_text("name: bad_provider\n")
        (plugin_dir / "__init__.py").write_text(
            "def register(ctx):\n"
            " ctx.register_memory_provider('not-a-provider')\n"
        )
        monkeypatch.setenv("HERMES_HOME", str(tmp_path / "hermes_test"))
        mgr = PluginManager()
        mgr.discover_and_load()
        # Plugin still loads (warning only), but no provider is stored
        assert mgr._plugin_memory_provider is None

View File

@@ -294,32 +294,22 @@ class TestBuiltinDiscovery:
"tools.browser_tool",
"tools.clarify_tool",
"tools.code_execution_tool",
"tools.crisis_tool",
"tools.cronjob_tools",
"tools.delegate_tool",
"tools.file_tools",
"tools.homeassistant_tool",
"tools.image_generation_tool",
"tools.local_inference_tool",
"tools.memory_tool",
"tools.mixture_of_agents_tool",
"tools.process_registry",
"tools.rl_training_tool",
"tools.scavenger_fixer",
"tools.send_message_tool",
"tools.session_search_tool",
"tools.skill_manager_tool",
"tools.skills_tool",
"tools.sovereign_router",
"tools.sovereign_scavenger",
"tools.sovereign_teleport",
"tools.static_analyzer",
"tools.symbolic_verify",
"tools.terminal_tool",
"tools.todo_tool",
"tools.tts_tool",
"tools.ultraplan",
"tools.verify_tool",
"tools.vision_tools",
"tools.web_tools",
}

View File

@@ -1,81 +0,0 @@
import json
from pathlib import Path
from toolsets import resolve_toolset
from tools.registry import registry
def test_create_action_saves_markdown_and_json(tmp_path):
    """`create` writes both the markdown plan and its JSON sidecar."""
    from tools.ultraplan import ultraplan_tool

    streams = [
        {
            "id": "A",
            "name": "Backlog burn",
            "phases": [
                {"id": "A1", "name": "Triage", "artifact": "issue list"},
                {"id": "A2", "name": "Ship", "dependencies": ["A1"], "artifact": "PR"},
            ],
        }
    ]
    raw = ultraplan_tool(
        action="create",
        mission="Daily autonomous planning",
        streams=streams,
        base_dir=str(tmp_path),
    )
    result = json.loads(raw)
    md_path = Path(result["file_path"])
    assert result["success"] is True
    assert md_path.exists()
    assert Path(result["json_path"]).exists()
    assert "Work Streams" in md_path.read_text(encoding="utf-8")
def test_load_action_returns_saved_plan(tmp_path):
    """A plan saved via `create` round-trips through `load` intact."""
    from tools.ultraplan import ultraplan_tool

    created = json.loads(
        ultraplan_tool(
            action="create",
            date="20260422",
            mission="Mission from saved plan",
            base_dir=str(tmp_path),
        )
    )
    loaded = json.loads(
        ultraplan_tool(action="load", date="20260422", base_dir=str(tmp_path))
    )
    for payload in (created, loaded):
        assert payload["success"] is True
    assert loaded["plan"]["mission"] == "Mission from saved plan"
    assert loaded["file_path"].endswith("ultraplan_20260422.md")
def test_cron_spec_returns_daily_schedule_and_prompt():
    """`cron_spec` exposes the 6am daily schedule and a usable prompt."""
    from tools.ultraplan import ultraplan_tool

    spec = json.loads(ultraplan_tool(action="cron_spec"))
    assert spec["success"] is True
    assert spec["schedule"] == "0 6 * * *"
    prompt = spec["prompt"]
    assert "Ultraplan" in prompt
    assert "ultraplan_YYYYMMDD.md" in prompt
def test_registry_registers_ultraplan_tool():
    """Importing the module registers `ultraplan` under the `todo` toolset."""
    import tools.ultraplan  # noqa: F401

    entry = registry.get_entry("ultraplan")
    assert entry is not None and entry.toolset == "todo"
def test_default_toolsets_include_ultraplan():
    """Both the `todo` and `hermes-cli` toolsets expose the ultraplan tool."""
    for toolset_name in ("todo", "hermes-cli"):
        assert "ultraplan" in resolve_toolset(toolset_name)

View File

@@ -290,9 +290,6 @@ def load_ultraplan(date: str, base_dir: Path = None) -> Optional[Ultraplan]:
return None
DEFAULT_ULTRAPLAN_SCHEDULE = "0 6 * * *"
def generate_daily_cron_prompt() -> str:
"""Generate the prompt for the daily ultraplan cron job."""
return """Generate today's Ultraplan.
@@ -301,9 +298,9 @@ Steps:
1. Check open Gitea issues assigned to you
2. Check open PRs needing review
3. Check fleet health status
4. Decompose work into parallel streams with concrete phases and artifacts
5. Use the ultraplan tool to save ~/.timmy/cron/ultraplan_YYYYMMDD.md and the matching JSON sidecar
6. Optionally file a Gitea issue with the plan summary
4. Decompose work into parallel streams
5. Generate ultraplan_YYYYMMDD.md
6. File Gitea issue with the plan
Output format:
- Mission statement
@@ -311,176 +308,3 @@ Output format:
- Dependency map
- Success metrics
"""
def generate_daily_cron_job_spec(schedule: str = DEFAULT_ULTRAPLAN_SCHEDULE) -> Dict[str, str]:
    """Build the reusable cron-job spec for daily Ultraplan generation.

    *schedule* defaults to the module-wide daily schedule; the prompt is
    regenerated on every call so it always reflects the current template.
    """
    spec = {
        "name": "Daily Ultraplan",
        "schedule": schedule,
        "prompt": generate_daily_cron_prompt(),
        "path_pattern": "~/.timmy/cron/ultraplan_YYYYMMDD.md",
    }
    return spec
def _resolve_base_dir(base_dir: Optional[str | Path]) -> Path:
"""Normalize the requested Ultraplan base directory."""
if base_dir is None:
return Path.home() / ".timmy" / "cron"
return Path(base_dir).expanduser()
def ultraplan_tool(
    action: str,
    date: Optional[str] = None,
    mission: str = "",
    streams: Optional[List[Dict[str, Any]]] = None,
    metrics: Optional[Dict[str, Any]] = None,
    notes: str = "",
    base_dir: Optional[str] = None,
) -> str:
    """Create/load Ultraplan artifacts and expose a daily cron spec.

    Dispatches on *action* (``create`` | ``load`` | ``cron_spec``) and always
    returns a JSON string produced by the registry's tool_result/tool_error
    helpers; any exception is caught and reported as a tool error.
    """
    from tools.registry import tool_error, tool_result

    action = (action or "").strip().lower()
    plans_dir = _resolve_base_dir(base_dir)
    try:
        if action == "create":
            plan = create_ultraplan(date=date, mission=mission, streams=streams or [])
            # Optional extras are only attached when supplied.
            if metrics:
                plan.metrics = metrics
            if notes:
                plan.notes = notes
            md_path = save_ultraplan(plan, base_dir=plans_dir)
            sidecar = plans_dir / f"ultraplan_{plan.date}.json"
            return tool_result(
                success=True,
                action="create",
                date=plan.date,
                file_path=str(md_path),
                json_path=str(sidecar),
                plan=plan.to_dict(),
            )

        if action == "load":
            plan_date = date or datetime.now().strftime("%Y%m%d")
            plan = load_ultraplan(plan_date, base_dir=plans_dir)
            if plan is None:
                return tool_error(
                    f"No Ultraplan found for {plan_date}",
                    success=False,
                    action="load",
                    date=plan_date,
                )
            return tool_result(
                success=True,
                action="load",
                date=plan.date,
                file_path=str(plans_dir / f"ultraplan_{plan.date}.md"),
                json_path=str(plans_dir / f"ultraplan_{plan.date}.json"),
                plan=plan.to_dict(),
                markdown=plan.to_markdown(),
            )

        if action == "cron_spec":
            return tool_result(
                success=True, action="cron_spec", **generate_daily_cron_job_spec()
            )

        return tool_error(
            f"Unknown Ultraplan action: {action}",
            success=False,
            action=action,
        )
    except Exception as e:
        return tool_error(f"Ultraplan {action or 'tool'} failed: {e}", success=False, action=action)
# JSON-schema-style tool specification consumed by the tool registry; the
# structure mirrors the keyword arguments of ultraplan_tool() above.
ULTRAPLAN_SCHEMA = {
    "name": "ultraplan",
    "description": (
        "Create or load daily Ultraplan planning artifacts under ~/.timmy/cron/ and "
        "return a reusable cron spec for autonomous planning. Use this when you want "
        "a concrete markdown/json plan file with streams, phases, dependencies, and metrics."
    ),
    "parameters": {
        "type": "object",
        "properties": {
            # Dispatch selector — must be one of the three supported actions.
            "action": {
                "type": "string",
                "enum": ["create", "load", "cron_spec"],
                "description": "Operation to perform",
            },
            "date": {
                "type": "string",
                "description": "Plan date as YYYYMMDD. Defaults to today for create/load.",
            },
            "mission": {
                "type": "string",
                "description": "High-level mission statement for today's plan.",
            },
            # Nested stream/phase structure; only "name" is mandatory at
            # either level — ids, artifacts, and dependencies are optional.
            "streams": {
                "type": "array",
                "description": "Optional work streams with phases/artifacts/dependencies for create.",
                "items": {
                    "type": "object",
                    "properties": {
                        "id": {"type": "string"},
                        "name": {"type": "string"},
                        "phases": {
                            "type": "array",
                            "items": {
                                "type": "object",
                                "properties": {
                                    "id": {"type": "string"},
                                    "name": {"type": "string"},
                                    "description": {"type": "string"},
                                    "artifact": {"type": "string"},
                                    "dependencies": {
                                        "type": "array",
                                        "items": {"type": "string"},
                                    },
                                },
                                "required": ["name"],
                            },
                        },
                    },
                    "required": ["name"],
                },
            },
            "metrics": {
                "type": "object",
                "description": "Optional success metrics to store on the plan.",
                "additionalProperties": True,
            },
            "notes": {
                "type": "string",
                "description": "Optional free-form notes appended to the saved plan.",
            },
            "base_dir": {
                "type": "string",
                "description": "Optional override for the Ultraplan storage directory.",
            },
        },
        "required": ["action"],
    },
}
from tools.registry import registry

# Register the tool under the "todo" toolset at import time; the lambda
# adapts the registry's args-dict calling convention to ultraplan_tool's
# keyword signature, dropping any extra keyword arguments (**_kw).
registry.register(
    name="ultraplan",
    toolset="todo",
    schema=ULTRAPLAN_SCHEMA,
    handler=lambda args, **_kw: ultraplan_tool(
        action=args.get("action", ""),
        date=args.get("date"),
        mission=args.get("mission", ""),
        streams=args.get("streams"),
        metrics=args.get("metrics"),
        notes=args.get("notes", ""),
        base_dir=args.get("base_dir"),
    ),
    emoji="🗺️",
)

View File

@@ -47,7 +47,7 @@ _HERMES_CORE_TOOLS = [
# Text-to-speech
"text_to_speech",
# Planning & memory
"todo", "ultraplan", "memory",
"todo", "memory",
# Session history search
"session_search",
# Clarifying questions
@@ -157,8 +157,8 @@ TOOLSETS = {
},
"todo": {
"description": "Task planning and tracking for multi-step work, including daily Ultraplan artifacts",
"tools": ["todo", "ultraplan"],
"description": "Task planning and tracking for multi-step work",
"tools": ["todo"],
"includes": []
},