Compare commits
2 Commits
fix/749...fix/issue-
| Author | SHA1 | Date |
|---|---|---|
| | 590b601b5c | |
| | c4aad087d4 | |
tests/test_batch_executor.py
@@ -1,77 +0,0 @@
"""Tests for batch tool execution (#749)."""

import pytest

from tools.batch_executor import (
    classify_tool_call,
    classify_batch,
)


class TestClassifyToolCall:
    def test_read_file_is_parallel(self):
        assert classify_tool_call("read_file") == "parallel"

    def test_search_files_is_parallel(self):
        assert classify_tool_call("search_files") == "parallel"

    def test_write_file_is_sequential(self):
        assert classify_tool_call("write_file") == "sequential"

    def test_terminal_is_sequential(self):
        assert classify_tool_call("terminal") == "sequential"

    def test_execute_code_is_sequential(self):
        assert classify_tool_call("execute_code") == "sequential"

    def test_cronjob_list_is_parallel(self):
        assert classify_tool_call("cronjob", {"action": "list"}) == "parallel"

    def test_cronjob_create_is_sequential(self):
        assert classify_tool_call("cronjob", {"action": "create"}) == "sequential"

    def test_fact_store_search_is_parallel(self):
        assert classify_tool_call("fact_store", {"action": "search"}) == "parallel"

    def test_fact_store_add_is_sequential(self):
        assert classify_tool_call("fact_store", {"action": "add"}) == "sequential"

    def test_unknown_tool_is_sequential(self):
        assert classify_tool_call("unknown_tool") == "sequential"


class TestClassifyBatch:
    def test_splits_correctly(self):
        calls = [
            {"name": "read_file", "args": {"path": "a"}},
            {"name": "write_file", "args": {"path": "b"}},
            {"name": "search_files", "args": {"pattern": "c"}},
            {"name": "terminal", "args": {"command": "d"}},
        ]
        parallel, sequential = classify_batch(calls)
        assert len(parallel) == 2
        assert len(sequential) == 2
        assert parallel[0]["name"] == "read_file"
        assert sequential[0]["name"] == "write_file"

    def test_all_parallel(self):
        calls = [
            {"name": "read_file", "args": {}},
            {"name": "search_files", "args": {}},
        ]
        parallel, sequential = classify_batch(calls)
        assert len(parallel) == 2
        assert len(sequential) == 0

    def test_all_sequential(self):
        calls = [
            {"name": "write_file", "args": {}},
            {"name": "terminal", "args": {}},
        ]
        parallel, sequential = classify_batch(calls)
        assert len(parallel) == 0
        assert len(sequential) == 2

    def test_empty(self):
        parallel, sequential = classify_batch([])
        assert len(parallel) == 0
        assert len(sequential) == 0
75
tests/test_mcp_pid_lock.py
Normal file
@@ -0,0 +1,75 @@
"""Tests for MCP PID file lock (#734)."""

import os
import sys
import tempfile
from pathlib import Path

sys.path.insert(0, str(Path(__file__).parent.parent))

# Override MCP_DIR for testing
import tools.mcp_pid_lock as lock_mod

_test_dir = Path(tempfile.mkdtemp())
lock_mod._MCP_DIR = _test_dir


def test_acquire_and_release():
    """Lock can be acquired and released."""
    pid = lock_mod.acquire_lock("test_server")
    assert pid == os.getpid()
    assert lock_mod.is_locked("test_server")
    lock_mod.release_lock("test_server")
    assert not lock_mod.is_locked("test_server")


def test_concurrent_lock_blocked():
    """Second acquire returns None when server running."""
    lock_mod.acquire_lock("test_concurrent")
    result = lock_mod.acquire_lock("test_concurrent")
    assert result is None
    lock_mod.release_lock("test_concurrent")


def test_stale_lock_cleaned():
    """Stale PID files are cleaned up."""
    # Write a fake stale PID
    pid_file = _test_dir / "stale.pid"
    pid_file.write_text("99999999")
    assert not lock_mod.is_locked("stale")
    assert not pid_file.exists()


def test_list_locks():
    """list_locks returns only active locks."""
    lock_mod.acquire_lock("list_test")
    locks = lock_mod.list_locks()
    assert "list_test" in locks
    assert locks["list_test"] == os.getpid()
    lock_mod.release_lock("list_test")


def test_cleanup_stale():
    """cleanup_stale_locks removes dead PID files."""
    (_test_dir / "dead1.pid").write_text("99999998")
    (_test_dir / "dead2.pid").write_text("99999999")
    count = lock_mod.cleanup_stale_locks()
    assert count >= 2


def test_force_release():
    """force_release kills process and removes lock."""
    lock_mod.acquire_lock("force_test")
    assert lock_mod.is_locked("force_test")
    lock_mod.force_release("force_test")
    assert not lock_mod.is_locked("force_test")


if __name__ == "__main__":
    tests = [test_acquire_and_release, test_concurrent_lock_blocked,
             test_stale_lock_cleaned, test_list_locks, test_cleanup_stale,
             test_force_release]
    for t in tests:
        print(f"Running {t.__name__}...")
        t()
        print("  PASS")
    print("\nAll tests passed.")
tools/batch_executor.py
@@ -1,250 +0,0 @@
"""
Batch tool execution with parallel safety checks (#749).

Classifies tool calls as parallel-safe or sequential, then executes
parallel-safe calls concurrently while keeping destructive ops serialized.

Safety classification:
- PARALLEL-SAFE: read_file, search_files, browser_snapshot, session_search,
  fact_store (search/probe/list), skill_view
- SEQUENTIAL: write_file, patch, terminal, execute_code, browser_click,
  browser_type, browser_navigate, cronjob (create/update/delete),
  memory (add/update/remove), skill_manage
"""

import asyncio
import logging
import time
from dataclasses import dataclass, field
from typing import Any, Callable, Dict, List, Optional, Tuple

logger = logging.getLogger(__name__)


# Tools that only read state — safe to parallelize
PARALLEL_SAFE_TOOLS = frozenset([
    "read_file",
    "search_files",
    "browser_snapshot",
    "browser_get_images",
    "browser_back",
    "browser_vision",
    "browser_console",
    "session_search",
    "fact_store",  # search/probe/list are read-only; add/update are not
    "skill_view",
    "skills_list",
    "cronjob",  # list is read-only; create/update/run are not (filtered below)
    "clarify",  # asking questions is safe
    "memory",  # probe/search/list are read-only
    "vision_analyze",
])

# Tools that modify state — must be serialized
SEQUENTIAL_TOOLS = frozenset([
    "write_file",
    "patch",
    "terminal",
    "execute_code",
    "browser_click",
    "browser_type",
    "browser_press",
    "browser_scroll",
    "browser_navigate",
    "cronjob",  # create/update/run/pause/resume/remove
    "memory",  # add/update/remove
    "skill_manage",
    "todo",
    "text_to_speech",
    "image_generate",
    "delegate_task",
    "clarify",  # clarify with choices needs user input
    "process",
])

# Cronjob sub-actions that are read-only
_CRON_READ_ONLY = frozenset(["list"])


@dataclass
class BatchResult:
    """Result of a batch tool execution."""
    results: List[Dict[str, Any]] = field(default_factory=list)
    parallel_count: int = 0
    sequential_count: int = 0
    elapsed_ms: float = 0


def classify_tool_call(tool_name: str, tool_args: Optional[Dict] = None) -> str:
    """Classify a tool call, returning 'parallel' or 'sequential'."""
    # Special cases based on sub-action
    if tool_name == "cronjob":
        action = (tool_args or {}).get("action", "")
        if action in _CRON_READ_ONLY:
            return "parallel"
        return "sequential"

    if tool_name == "fact_store":
        action = (tool_args or {}).get("action", "")
        if action in ("search", "probe", "list", "related", "reason", "contradict"):
            return "parallel"
        return "sequential"

    if tool_name == "memory":
        action = (tool_args or {}).get("action", "")
        if action in ("probe", "search", "list"):
            return "parallel"
        return "sequential"

    # Check sequential first (more restrictive)
    if tool_name in SEQUENTIAL_TOOLS:
        return "sequential"

    if tool_name in PARALLEL_SAFE_TOOLS:
        return "parallel"

    # Unknown tools default to sequential (safe)
    return "sequential"


def classify_batch(tool_calls: List[Dict]) -> Tuple[List[Dict], List[Dict]]:
    """Split a list of tool calls into parallel-safe and sequential groups.

    Args:
        tool_calls: List of dicts with 'name' and 'args' keys

    Returns:
        (parallel_calls, sequential_calls)
    """
    parallel = []
    sequential = []

    for call in tool_calls:
        name = call.get("name", "")
        args = call.get("args", {})
        classification = classify_tool_call(name, args)

        if classification == "parallel":
            parallel.append(call)
        else:
            sequential.append(call)

    return parallel, sequential


async def execute_parallel(
    tool_calls: List[Dict],
    executor: Callable,
) -> List[Dict[str, Any]]:
    """Execute parallel-safe tool calls concurrently.

    Args:
        tool_calls: List of tool call dicts
        executor: Async callable(tool_name, tool_args) -> result

    Returns:
        List of results in same order as input
    """
    tasks = []
    for call in tool_calls:
        task = asyncio.create_task(
            executor(call["name"], call.get("args", {})),
            name=f"tool:{call['name']}"
        )
        tasks.append((call, task))

    results = []
    for call, task in tasks:
        try:
            result = await task
            results.append({
                "tool_name": call["name"],
                "result": result,
                "parallel": True,
                "error": None,
            })
        except Exception as e:
            logger.error("Parallel tool '%s' failed: %s", call["name"], e)
            results.append({
                "tool_name": call["name"],
                "result": None,
                "parallel": True,
                "error": str(e),
            })

    return results


async def execute_sequential(
    tool_calls: List[Dict],
    executor: Callable,
) -> List[Dict[str, Any]]:
    """Execute sequential tool calls one at a time."""
    results = []
    for call in tool_calls:
        try:
            result = await executor(call["name"], call.get("args", {}))
            results.append({
                "tool_name": call["name"],
                "result": result,
                "parallel": False,
                "error": None,
            })
        except Exception as e:
            logger.error("Sequential tool '%s' failed: %s", call["name"], e)
            results.append({
                "tool_name": call["name"],
                "result": None,
                "parallel": False,
                "error": str(e),
            })

    return results


async def execute_batch(
    tool_calls: List[Dict],
    executor: Callable,
) -> BatchResult:
    """Execute a batch of tool calls with parallel safety checks.

    1. Classify each call as parallel-safe or sequential
    2. Execute all parallel-safe calls concurrently
    3. Execute sequential calls one at a time
    4. Merge results (parallel group first, then sequential)

    Args:
        tool_calls: List of dicts with 'name' and 'args' keys
        executor: Async callable(tool_name, tool_args) -> result

    Returns:
        BatchResult with all results and timing
    """
    start = time.monotonic()

    parallel_calls, sequential_calls = classify_batch(tool_calls)

    # Execute parallel-safe calls concurrently
    parallel_results = []
    if parallel_calls:
        parallel_results = await execute_parallel(parallel_calls, executor)

    # Execute sequential calls in order
    sequential_results = []
    if sequential_calls:
        sequential_results = await execute_sequential(sequential_calls, executor)

    # Merge results — parallel first, then sequential (order preserved within groups)
    all_results = parallel_results + sequential_results

    elapsed = (time.monotonic() - start) * 1000

    return BatchResult(
        results=all_results,
        parallel_count=len(parallel_calls),
        sequential_count=len(sequential_calls),
        elapsed_ms=elapsed,
    )
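
For orientation, here is a minimal sketch of driving the module's entry point. Only execute_batch and the call-dict shape come from the file above; fake_executor and demo are hypothetical stand-ins, not part of the diff.

import asyncio

from tools.batch_executor import execute_batch


async def fake_executor(tool_name, tool_args):
    # Hypothetical stub: pretend every tool takes ~100 ms and succeeds.
    await asyncio.sleep(0.1)
    return f"{tool_name} ok"


async def demo():
    calls = [
        {"name": "read_file", "args": {"path": "a.txt"}},
        {"name": "search_files", "args": {"pattern": "foo"}},
        {"name": "write_file", "args": {"path": "b.txt"}},
    ]
    batch = await execute_batch(calls, fake_executor)
    # The two reads run concurrently, the write runs after them,
    # so elapsed_ms lands near 200 rather than 300.
    print(batch.parallel_count, batch.sequential_count, round(batch.elapsed_ms))


asyncio.run(demo())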
158
tools/mcp_pid_lock.py
Normal file
@@ -0,0 +1,158 @@
"""
MCP PID File Lock — Prevent concurrent MCP server instances.

Uses PID files at ~/.hermes/mcp/{name}.pid to ensure only one instance
of each MCP server runs at a time. Prevents zombie accumulation (#714).

Usage:
    from tools.mcp_pid_lock import acquire_lock, release_lock, is_locked

    lock = acquire_lock("morrowind")
    if lock:
        try:
            # run server
            pass
        finally:
            release_lock("morrowind")
"""

import fcntl
import os
import signal
import time
from pathlib import Path
from typing import Optional

_MCP_DIR = Path(os.getenv("HERMES_HOME", str(Path.home() / ".hermes"))) / "mcp"


def _pid_file(name: str) -> Path:
    """Get the PID file path for an MCP server."""
    _MCP_DIR.mkdir(parents=True, exist_ok=True)
    return _MCP_DIR / f"{name}.pid"


def _is_process_alive(pid: int) -> bool:
    """Check if a process is running."""
    try:
        os.kill(pid, 0)  # Signal 0 = check if alive
        return True
    except ProcessLookupError:
        return False
    except PermissionError:
        return True  # Exists but we can't signal it


def _read_pid_file(name: str) -> Optional[int]:
    """Read PID from file; returns None if invalid."""
    path = _pid_file(name)
    if not path.exists():
        return None
    try:
        content = path.read_text().strip()
        return int(content) if content else None
    except (ValueError, OSError):
        return None


def _write_pid_file(name: str, pid: int):
    """Write PID to file."""
    path = _pid_file(name)
    path.write_text(str(pid))


def _remove_pid_file(name: str):
    """Remove PID file."""
    path = _pid_file(name)
    try:
        path.unlink()
    except FileNotFoundError:
        pass


def is_locked(name: str) -> bool:
    """Check if an MCP server is already running."""
    pid = _read_pid_file(name)
    if pid is None:
        return False
    if _is_process_alive(pid):
        return True
    # Stale PID file
    _remove_pid_file(name)
    return False


def acquire_lock(name: str) -> Optional[int]:
    """
    Acquire a PID lock for an MCP server.

    Returns the PID if lock acquired, None if server already running.
    """
    # Check existing lock
    existing_pid = _read_pid_file(name)
    if existing_pid is not None:
        if _is_process_alive(existing_pid):
            return None  # Server already running
        # Stale lock — clean up
        _remove_pid_file(name)

    # Write our PID
    pid = os.getpid()
    _write_pid_file(name, pid)
    return pid


def release_lock(name: str):
    """Release the PID lock."""
    # Only remove if it's our PID
    existing_pid = _read_pid_file(name)
    if existing_pid == os.getpid():
        _remove_pid_file(name)


def force_release(name: str):
    """Force release a lock (for cleanup scripts)."""
    pid = _read_pid_file(name)
    if pid and _is_process_alive(pid):
        try:
            os.kill(pid, signal.SIGTERM)
            time.sleep(0.5)
            if _is_process_alive(pid):
                os.kill(pid, signal.SIGKILL)
        except (ProcessLookupError, PermissionError):
            pass
    _remove_pid_file(name)


def list_locks() -> dict:
    """List all active MCP locks."""
    locks = {}
    if not _MCP_DIR.exists():
        return locks

    for pid_file in _MCP_DIR.glob("*.pid"):
        name = pid_file.stem
        pid = _read_pid_file(name)
        if pid and _is_process_alive(pid):
            locks[name] = pid
        else:
            # Clean up stale
            _remove_pid_file(name)

    return locks


def cleanup_stale_locks() -> int:
    """Remove all stale PID files. Returns count cleaned."""
    cleaned = 0
    if not _MCP_DIR.exists():
        return 0

    for pid_file in _MCP_DIR.glob("*.pid"):
        name = pid_file.stem
        pid = _read_pid_file(name)
        if pid is None or not _is_process_alive(pid):
            _remove_pid_file(name)
            cleaned += 1

    return cleaned
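
As a usage note, not part of the diff: the acquire/release pair invites a context-manager wrapper, sketched here under the assumption that callers want the try/finally from the usage docstring handled for them. mcp_lock is a hypothetical name, built only on the module's own functions.

from contextlib import contextmanager

from tools.mcp_pid_lock import acquire_lock, release_lock


@contextmanager
def mcp_lock(name: str):
    # Hypothetical convenience wrapper around acquire_lock/release_lock.
    # Yields True if the lock was acquired, False if the server is already running.
    pid = acquire_lock(name)
    try:
        yield pid is not None
    finally:
        if pid is not None:
            release_lock(name)


# with mcp_lock("morrowind") as acquired:
#     if acquired:
#         ...  # run server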