forked from Rockachopa/Timmy-time-dashboard
## Thinking Engine Tests (#1314) - New: tests/timmy/test_thinking_engine.py — 117 tests across 21 test classes - Covers ThinkingEngine core + all 4 mixin classes: - engine.py: init, idle detection, store/retrieve, pruning, dedup, continuity, context assembly, novel thought generation, think_once, journal, broadcast - _distillation.py: should_distill, build_distill_prompt, parse_facts_response, filter_and_store_facts, maybe_distill - _issue_filing.py: references_real_files, get_recent_thoughts_for_issues, build_issue_classify_prompt, parse_issue_items, file_single_issue - _seeds_mixin.py: pick_seed_type, gather_seed, all seed sources, check_workspace - _snapshot.py: system snapshot, memory context, update_memory - _db.py: get_conn, row_to_thought, Thought dataclass - seeds.py: constants, prompt template, think tag regex - Targets 80%+ coverage of engine.py's 430 lines ## Stack Manifest (#986) - New: docs/stack_manifest.json — 8 categories, 40+ tools with pinned versions - LLM Inference, Coding Agents, Image Gen, Music/Voice, Orchestration, Nostr+Lightning+Bitcoin, Memory/KG, Streaming/Content - Schema: {tool, version, role, install_command, license, status} - New: src/timmy/stack_manifest.py — query_stack() runtime tool - Category and tool filtering (case-insensitive, partial match) - Manifest caching, graceful error handling - New: tests/timmy/test_stack_manifest.py — 24 tests - Registered query_stack in tool registry + tool catalog - Total: 141 new tests, all passing
This commit is contained in:
347
docs/stack_manifest.json
Normal file
347
docs/stack_manifest.json
Normal file
@@ -0,0 +1,347 @@
|
||||
{
|
||||
"$schema": "https://json-schema.org/draft/2020-12/schema",
|
||||
"title": "Timmy Sovereign Tech Stack Manifest",
|
||||
"description": "Machine-readable catalog of every tool in the sovereign stack. Queryable by Timmy at runtime via query_stack().",
|
||||
"version": "1.0.0",
|
||||
"generated": "2026-03-24",
|
||||
"source_issue": "#986",
|
||||
"parent_issue": "#982",
|
||||
"categories": [
|
||||
{
|
||||
"id": "llm_inference",
|
||||
"name": "Local LLM Inference",
|
||||
"description": "On-device language model serving — no cloud required",
|
||||
"tools": [
|
||||
{
|
||||
"tool": "vllm-mlx",
|
||||
"version": "latest",
|
||||
"role": "High-throughput LLM inference on Apple Silicon via MLX backend",
|
||||
"install_command": "pip install vllm-mlx",
|
||||
"license": "Apache-2.0",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "Ollama",
|
||||
"version": "0.18.2",
|
||||
"role": "Primary local LLM runtime — serves Qwen3, Llama, DeepSeek models",
|
||||
"install_command": "curl -fsSL https://ollama.com/install.sh | sh",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "mlx-lm",
|
||||
"version": "0.31.1",
|
||||
"role": "Apple MLX native language model inference and fine-tuning",
|
||||
"install_command": "pip install mlx-lm==0.31.1",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "exo",
|
||||
"version": "1.0-EA",
|
||||
"role": "Distributed LLM inference across heterogeneous devices",
|
||||
"install_command": "pip install exo",
|
||||
"license": "GPL-3.0",
|
||||
"status": "experimental"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "coding_agents",
|
||||
"name": "AI Coding Agents",
|
||||
"description": "Autonomous code generation, review, and self-modification",
|
||||
"tools": [
|
||||
{
|
||||
"tool": "Goose",
|
||||
"version": "1.20.1",
|
||||
"role": "AI coding agent for autonomous code generation and refactoring",
|
||||
"install_command": "brew install block/goose/goose",
|
||||
"license": "Apache-2.0",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "OpenHands",
|
||||
"version": "1.5.0",
|
||||
"role": "Open-source AI software engineer for complex multi-file changes",
|
||||
"install_command": "pip install openhands==1.5.0",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "Aider",
|
||||
"version": "latest",
|
||||
"role": "AI pair programmer using local Ollama models (qwen3, deepseek-coder)",
|
||||
"install_command": "pip install aider-chat",
|
||||
"license": "Apache-2.0",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "mini-swe-agent",
|
||||
"version": "2.0",
|
||||
"role": "Lightweight software engineering agent for targeted fixes",
|
||||
"install_command": "pip install mini-swe-agent",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "Forgejo",
|
||||
"version": "14.0.3",
|
||||
"role": "Self-hosted Git forge (Gitea fork) — sovereign code hosting",
|
||||
"install_command": "docker pull forgejo/forgejo:14.0.3",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "image_generation",
|
||||
"name": "Image Generation",
|
||||
"description": "Local image synthesis — avatars, art, visual content",
|
||||
"tools": [
|
||||
{
|
||||
"tool": "ComfyUI",
|
||||
"version": "0.17.2",
|
||||
"role": "Node-based image generation pipeline with FLUX model support",
|
||||
"install_command": "git clone https://github.com/comfyanonymous/ComfyUI && pip install -r requirements.txt",
|
||||
"license": "GPL-3.0",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "Draw Things",
|
||||
"version": "latest",
|
||||
"role": "macOS-native image generation app with Metal acceleration",
|
||||
"install_command": "mas install 6450292044",
|
||||
"license": "Proprietary (free)",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "FLUX.1 Dev GGUF Q8",
|
||||
"version": "1.0",
|
||||
"role": "Quantized FLUX.1 model for high-quality local image generation",
|
||||
"install_command": "ollama pull flux.1-dev-q8",
|
||||
"license": "FLUX.1-dev-non-commercial",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "FLUX.2 Klein",
|
||||
"version": "2.0",
|
||||
"role": "Fast lightweight FLUX model for rapid image prototyping",
|
||||
"install_command": "comfyui-manager install flux2-klein",
|
||||
"license": "Apache-2.0",
|
||||
"status": "active"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "music_voice",
|
||||
"name": "Music and Voice",
|
||||
"description": "Audio synthesis — music generation, text-to-speech, voice cloning",
|
||||
"tools": [
|
||||
{
|
||||
"tool": "ACE-Step",
|
||||
"version": "1.5",
|
||||
"role": "Local music generation — 30s loops in under 60s on Apple Silicon",
|
||||
"install_command": "pip install ace-step==1.5",
|
||||
"license": "Apache-2.0",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "mlx-audio",
|
||||
"version": "0.4.1",
|
||||
"role": "Apple MLX native audio processing and text-to-speech",
|
||||
"install_command": "pip install mlx-audio==0.4.1",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "Piper TTS",
|
||||
"version": "1.4.1",
|
||||
"role": "Fast local neural text-to-speech with multiple voice models",
|
||||
"install_command": "pip install piper-tts==1.4.1",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "GPT-SoVITS",
|
||||
"version": "v2pro",
|
||||
"role": "Voice cloning and singing voice synthesis from few-shot samples",
|
||||
"install_command": "git clone https://github.com/RVC-Boss/GPT-SoVITS && pip install -r requirements.txt",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "agent_orchestration",
|
||||
"name": "Agent Orchestration",
|
||||
"description": "Multi-agent coordination, MCP servers, workflow engines",
|
||||
"tools": [
|
||||
{
|
||||
"tool": "FastMCP",
|
||||
"version": "3.1.1",
|
||||
"role": "Model Context Protocol server framework — tool registration for agents",
|
||||
"install_command": "pip install fastmcp==3.1.1",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "PocketFlow",
|
||||
"version": "latest",
|
||||
"role": "Lightweight agent workflow engine for multi-step task orchestration",
|
||||
"install_command": "pip install pocketflow",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "CrewAI",
|
||||
"version": "1.11.0",
|
||||
"role": "Multi-agent collaboration framework for complex task decomposition",
|
||||
"install_command": "pip install crewai==1.11.0",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "Agno",
|
||||
"version": "2.5.10",
|
||||
"role": "Core agent framework powering Timmy — tool registration, conversation management",
|
||||
"install_command": "pip install agno==2.5.10",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "nostr_lightning_bitcoin",
|
||||
"name": "Nostr + Lightning + Bitcoin",
|
||||
"description": "Sovereign identity, censorship-resistant communication, and value transfer",
|
||||
"tools": [
|
||||
{
|
||||
"tool": "nostr-sdk",
|
||||
"version": "0.44.2",
|
||||
"role": "Python SDK for Nostr protocol — sovereign decentralized identity",
|
||||
"install_command": "pip install nostr-sdk==0.44.2",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "nostrdvm",
|
||||
"version": "latest",
|
||||
"role": "Nostr Data Vending Machine — publish AI services on Nostr marketplace",
|
||||
"install_command": "pip install nostrdvm",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "LND",
|
||||
"version": "0.20.1",
|
||||
"role": "Lightning Network Daemon — sovereign Bitcoin payment channel management",
|
||||
"install_command": "brew install lnd",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "LN agent-tools",
|
||||
"version": "latest",
|
||||
"role": "Lightning Network integration tools for AI agents — invoice creation, payment",
|
||||
"install_command": "pip install ln-agent-tools",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "LNbits",
|
||||
"version": "1.4",
|
||||
"role": "Lightning Network wallet and extensions platform — API-first payments",
|
||||
"install_command": "docker pull lnbits/lnbits:1.4",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "Cashu",
|
||||
"version": "0.17.0",
|
||||
"role": "Ecash protocol for private Lightning-backed digital cash",
|
||||
"install_command": "pip install cashu==0.17.0",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "memory_knowledge_graphs",
|
||||
"name": "Memory and Knowledge Graphs",
|
||||
"description": "Persistent memory, vector search, knowledge graph construction",
|
||||
"tools": [
|
||||
{
|
||||
"tool": "Graphiti",
|
||||
"version": "0.28.2",
|
||||
"role": "Episodic memory via temporal knowledge graphs — remember conversations",
|
||||
"install_command": "pip install graphiti==0.28.2",
|
||||
"license": "Apache-2.0",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "Neo4j",
|
||||
"version": "2026.02",
|
||||
"role": "Graph database backend for knowledge graph storage and traversal",
|
||||
"install_command": "docker pull neo4j:2026.02",
|
||||
"license": "GPL-3.0 (Community)",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "ChromaDB",
|
||||
"version": "1.5.5",
|
||||
"role": "Local vector database for semantic search over embeddings",
|
||||
"install_command": "pip install chromadb==1.5.5",
|
||||
"license": "Apache-2.0",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "Mem0",
|
||||
"version": "1.0.5",
|
||||
"role": "Self-improving memory layer for AI agents — fact extraction and recall",
|
||||
"install_command": "pip install mem0ai==1.0.5",
|
||||
"license": "Apache-2.0",
|
||||
"status": "active"
|
||||
}
|
||||
]
|
||||
},
|
||||
{
|
||||
"id": "streaming_content",
|
||||
"name": "Streaming and Content",
|
||||
"description": "Video streaming, recording, editing, and content production",
|
||||
"tools": [
|
||||
{
|
||||
"tool": "MediaMTX",
|
||||
"version": "1.16.3",
|
||||
"role": "RTSP/RTMP/HLS media server for streaming game footage and AI output",
|
||||
"install_command": "docker pull bluenviron/mediamtx:1.16.3",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "OBS",
|
||||
"version": "32.0.4",
|
||||
"role": "Open Broadcaster Software — screen capture, scene composition, streaming",
|
||||
"install_command": "brew install --cask obs",
|
||||
"license": "GPL-2.0",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "obsws-python",
|
||||
"version": "latest",
|
||||
"role": "Python client for OBS WebSocket — programmatic recording and scene control",
|
||||
"install_command": "pip install obsws-python",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
},
|
||||
{
|
||||
"tool": "MoviePy",
|
||||
"version": "2.1.2",
|
||||
"role": "Python video editing — clip assembly, overlay, sub-5-min episode production",
|
||||
"install_command": "pip install moviepy==2.1.2",
|
||||
"license": "MIT",
|
||||
"status": "active"
|
||||
}
|
||||
]
|
||||
}
|
||||
]
|
||||
}
|
||||
160
src/timmy/stack_manifest.py
Normal file
160
src/timmy/stack_manifest.py
Normal file
@@ -0,0 +1,160 @@
|
||||
"""Sovereign tech stack manifest — machine-readable catalog with runtime query tool.
|
||||
|
||||
Loads ``docs/stack_manifest.json`` and exposes ``query_stack()`` for Timmy to
|
||||
introspect his own technology stack at runtime.
|
||||
|
||||
Issue: #986 (parent: #982 Session Crystallization)
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
import logging
|
||||
from pathlib import Path
|
||||
from typing import Any
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Resolve project root: this file lives at src/timmy/stack_manifest.py
|
||||
# Project root is two levels up from src/timmy/
|
||||
_PROJECT_ROOT = Path(__file__).resolve().parent.parent.parent
|
||||
_MANIFEST_PATH = _PROJECT_ROOT / "docs" / "stack_manifest.json"
|
||||
|
||||
# Cached manifest (loaded on first access)
|
||||
_manifest_cache: dict[str, Any] | None = None
|
||||
|
||||
|
||||
def _load_manifest(path: Path | None = None) -> dict[str, Any]:
|
||||
"""Load and cache the stack manifest from disk.
|
||||
|
||||
Args:
|
||||
path: Override manifest path (useful for testing).
|
||||
|
||||
Returns:
|
||||
The parsed manifest dict.
|
||||
|
||||
Raises:
|
||||
FileNotFoundError: If the manifest file doesn't exist.
|
||||
json.JSONDecodeError: If the manifest is invalid JSON.
|
||||
"""
|
||||
global _manifest_cache
|
||||
|
||||
target = path or _MANIFEST_PATH
|
||||
|
||||
if _manifest_cache is not None and path is None:
|
||||
return _manifest_cache
|
||||
|
||||
with open(target, encoding="utf-8") as f:
|
||||
data = json.load(f)
|
||||
|
||||
if path is None:
|
||||
_manifest_cache = data
|
||||
return data
|
||||
|
||||
|
||||
def _reset_cache() -> None:
|
||||
"""Reset the manifest cache (for testing)."""
|
||||
global _manifest_cache
|
||||
_manifest_cache = None
|
||||
|
||||
|
||||
def _match_tool(tool: dict, category: str | None, tool_name: str | None) -> bool:
|
||||
"""Check if a tool entry matches the given filters.
|
||||
|
||||
Matching is case-insensitive and supports partial matches.
|
||||
"""
|
||||
if tool_name:
|
||||
name_lower = tool_name.lower()
|
||||
tool_lower = tool["tool"].lower()
|
||||
if name_lower not in tool_lower and tool_lower not in name_lower:
|
||||
return False
|
||||
return True
|
||||
|
||||
|
||||
def query_stack(
    category: str | None = None,
    tool: str | None = None,
) -> str:
    """Query Timmy's sovereign tech stack manifest.

    Discover the tools, frameworks, and services available in the
    sovereign stack — with exact versions, install commands, licenses,
    and roles.

    Args:
        category: Filter by category name or ID (e.g., 'llm_inference',
            'Music and Voice', 'nostr'). Case-insensitive, partial match.
        tool: Filter by tool name (e.g., 'Ollama', 'FastMCP', 'Neo4j').
            Case-insensitive, partial match.

    Returns:
        Formatted string listing the matching tools with version, role,
        install command, license, and status; a full-stack summary when
        no filters are given; an explanatory message when nothing matches
        or the manifest cannot be read.

    Examples:
        query_stack() → Full stack summary
        query_stack(category="llm") → All LLM inference tools
        query_stack(tool="Ollama") → Ollama details
        query_stack(category="nostr", tool="LND") → LND in the Nostr category
    """
    # Manifest problems are reported as readable strings, not raised —
    # this is an agent-facing tool, so a message beats a traceback.
    try:
        manifest = _load_manifest()
    except FileNotFoundError:
        return "Stack manifest not found. Run from the project root or check docs/stack_manifest.json."
    except json.JSONDecodeError as exc:
        return f"Stack manifest is invalid JSON: {exc}"

    all_categories = manifest.get("categories", [])
    lines: list[str] = []
    total = 0

    for entry in all_categories:
        entry_id = entry.get("id", "")
        entry_name = entry.get("name", "")

        # Category filter: substring hit on either id or display name.
        if category:
            needle = category.lower()
            if needle not in entry_id.lower() and needle not in entry_name.lower():
                continue

        hits = [t for t in entry.get("tools", []) if _match_tool(t, category, tool)]
        total += len(hits)
        if not hits:
            continue

        lines.append(f"\n## {entry_name} ({entry_id})")
        lines.append(f"{entry.get('description', '')}\n")
        for t in hits:
            # Only non-active statuses get a badge (e.g. [EXPERIMENTAL]).
            badge = "" if t.get("status") == "active" else f" [{t['status'].upper()}]"
            lines.append(f" **{t['tool']}** v{t['version']}{badge}")
            lines.append(f" Role: {t['role']}")
            lines.append(f" Install: `{t['install_command']}`")
            lines.append(f" License: {t['license']}")
            lines.append("")

    # Nothing matched: explain which filter failed, most specific first.
    if not lines:
        if category and tool:
            return f'No tools found matching category="{category}", tool="{tool}".'
        if category:
            return f'No category matching "{category}". Available: {", ".join(c["id"] for c in all_categories)}'
        if tool:
            return f'No tool matching "{tool}" in any category.'
        return "Stack manifest is empty."

    header = f"Sovereign Tech Stack — {total} tool(s) matched"
    if category:
        header += f' (category: "{category}")'
    if tool:
        header += f' (tool: "{tool}")'

    version = manifest.get("version", "unknown")
    footer = f"\n---\nManifest v{version} | Source: docs/stack_manifest.json"

    return header + "\n" + "\n".join(lines) + footer
|
||||
@@ -244,6 +244,17 @@ def _register_thinking_tools(toolkit: Toolkit) -> None:
|
||||
raise
|
||||
|
||||
|
||||
def _register_stack_manifest_tool(toolkit: Toolkit) -> None:
    """Register the sovereign tech stack query tool.

    Registers ``query_stack`` on *toolkit* under the name "query_stack".
    Failures are logged and then re-raised — registration problems should
    abort toolkit construction, not be silently swallowed.
    """
    try:
        # Import inside the try so a missing/broken stack_manifest module
        # is caught, logged, and propagated like any other registration error.
        from timmy.stack_manifest import query_stack

        toolkit.register(query_stack, name="query_stack")
    except (ImportError, AttributeError) as exc:
        logger.error("Failed to register query_stack tool: %s", exc)
        raise
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Full toolkit factories
|
||||
# ---------------------------------------------------------------------------
|
||||
@@ -281,6 +292,7 @@ def create_full_toolkit(base_dir: str | Path | None = None):
|
||||
_register_gematria_tool(toolkit)
|
||||
_register_artifact_tools(toolkit)
|
||||
_register_thinking_tools(toolkit)
|
||||
_register_stack_manifest_tool(toolkit)
|
||||
|
||||
# Gitea issue management is now provided by the gitea-mcp server
|
||||
# (wired in as MCPTools in agent.py, not registered here)
|
||||
@@ -507,6 +519,11 @@ def _introspection_tool_catalog() -> dict:
|
||||
"description": "Review recent conversations to spot patterns, low-confidence answers, and errors",
|
||||
"available_in": ["orchestrator"],
|
||||
},
|
||||
"query_stack": {
|
||||
"name": "Query Stack",
|
||||
"description": "Query the sovereign tech stack manifest — discover tools, versions, and install commands",
|
||||
"available_in": ["orchestrator"],
|
||||
},
|
||||
"update_gitea_avatar": {
|
||||
"name": "Update Gitea Avatar",
|
||||
"description": "Generate and upload a wizard-themed avatar to Timmy's Gitea profile",
|
||||
|
||||
406
tests/timmy/test_stack_manifest.py
Normal file
406
tests/timmy/test_stack_manifest.py
Normal file
@@ -0,0 +1,406 @@
|
||||
"""Tests for timmy.stack_manifest — sovereign tech stack query tool.
|
||||
|
||||
Issue: #986
|
||||
"""
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
from unittest.mock import patch
|
||||
|
||||
import pytest
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Helpers
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
# Miniature manifest used by every test below: three categories with two
# tools each, shaped like the real manifest (same top-level keys and the
# same per-tool fields: tool/version/role/install_command/license/status).
# "exo-experimental" carries a non-"active" status so the status-badge
# formatting can be exercised.
_MINI_MANIFEST = {
    "version": "1.0.0",
    "categories": [
        {
            "id": "llm_inference",
            "name": "Local LLM Inference",
            "description": "On-device language model serving",
            "tools": [
                {
                    "tool": "Ollama",
                    "version": "0.18.2",
                    "role": "Primary local LLM runtime",
                    "install_command": "curl -fsSL https://ollama.com/install.sh | sh",
                    "license": "MIT",
                    "status": "active",
                },
                {
                    "tool": "mlx-lm",
                    "version": "0.31.1",
                    "role": "Apple MLX native inference",
                    "install_command": "pip install mlx-lm==0.31.1",
                    "license": "MIT",
                    "status": "active",
                },
            ],
        },
        {
            "id": "agent_orchestration",
            "name": "Agent Orchestration",
            "description": "Multi-agent coordination",
            "tools": [
                {
                    "tool": "FastMCP",
                    "version": "3.1.1",
                    "role": "MCP server framework",
                    "install_command": "pip install fastmcp==3.1.1",
                    "license": "MIT",
                    "status": "active",
                },
                {
                    "tool": "Agno",
                    "version": "2.5.10",
                    "role": "Core agent framework",
                    "install_command": "pip install agno==2.5.10",
                    "license": "MIT",
                    "status": "active",
                },
            ],
        },
        {
            "id": "nostr_lightning",
            "name": "Nostr + Lightning + Bitcoin",
            "description": "Sovereign identity and value transfer",
            "tools": [
                {
                    "tool": "LND",
                    "version": "0.20.1",
                    "role": "Lightning Network Daemon",
                    "install_command": "brew install lnd",
                    "license": "MIT",
                    "status": "active",
                },
                {
                    # Non-active status: exercises the [EXPERIMENTAL] badge.
                    "tool": "exo-experimental",
                    "version": "1.0",
                    "role": "Test tool",
                    "install_command": "pip install exo",
                    "license": "GPL-3.0",
                    "status": "experimental",
                },
            ],
        },
    ],
}
|
||||
|
||||
|
||||
def _write_manifest(tmp_path: Path, data: dict | None = None) -> Path:
|
||||
"""Write a test manifest file and return its path."""
|
||||
path = tmp_path / "stack_manifest.json"
|
||||
path.write_text(json.dumps(data or _MINI_MANIFEST, indent=2))
|
||||
return path
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# _load_manifest
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestLoadManifest:
    """Manifest loading and caching behavior of ``_load_manifest``."""

    def test_loads_from_file(self, tmp_path):
        from timmy.stack_manifest import _load_manifest

        manifest_file = _write_manifest(tmp_path)
        loaded = _load_manifest(manifest_file)
        assert loaded["version"] == "1.0.0"
        assert len(loaded["categories"]) == 3

    def test_raises_on_missing_file(self, tmp_path):
        from timmy.stack_manifest import _load_manifest

        missing = tmp_path / "nonexistent.json"
        with pytest.raises(FileNotFoundError):
            _load_manifest(missing)

    def test_raises_on_invalid_json(self, tmp_path):
        from timmy.stack_manifest import _load_manifest

        broken = tmp_path / "bad.json"
        broken.write_text("{invalid json")
        with pytest.raises(json.JSONDecodeError):
            _load_manifest(broken)

    def test_caching_works(self, tmp_path):
        from timmy.stack_manifest import _load_manifest, _reset_cache

        _reset_cache()
        manifest_file = _write_manifest(tmp_path)
        # Point the module-level default path at our temp manifest so the
        # cache (which only applies to default-path loads) is exercised.
        with patch("timmy.stack_manifest._MANIFEST_PATH", manifest_file):
            first = _load_manifest()
            second = _load_manifest()
        # Identity (not just equality) proves the second call hit the cache.
        assert first is second
        _reset_cache()

    def test_reset_cache_clears(self, tmp_path):
        from timmy import stack_manifest
        from timmy.stack_manifest import _load_manifest, _reset_cache

        _reset_cache()
        _load_manifest(_write_manifest(tmp_path))
        _reset_cache()
        assert stack_manifest._manifest_cache is None
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# query_stack — no filters
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestQueryStackNoFilters:
    """query_stack() with no arguments — full summary."""

    def _summary(self, tmp_path):
        # Shared runner: fresh cache, temp manifest, unfiltered query.
        from timmy.stack_manifest import _reset_cache, query_stack

        _reset_cache()
        manifest_file = _write_manifest(tmp_path)
        with patch("timmy.stack_manifest._MANIFEST_PATH", manifest_file):
            output = query_stack()
        _reset_cache()
        return output

    def test_returns_all_tools(self, tmp_path):
        output = self._summary(tmp_path)
        # Mini manifest carries 2 tools in each of its 3 categories.
        assert "6 tool(s) matched" in output
        assert "Ollama" in output
        assert "FastMCP" in output
        assert "LND" in output

    def test_includes_manifest_version(self, tmp_path):
        assert "v1.0.0" in self._summary(tmp_path)
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# query_stack — category filter
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestQueryStackCategoryFilter:
    """query_stack(category=...) filtering."""

    def _query(self, tmp_path, **kwargs):
        """Run query_stack against a fresh mini manifest.

        The cache is reset before the call and again in a ``finally`` —
        previously the trailing reset was skipped if query_stack raised,
        leaking cached state into later tests.
        """
        from timmy.stack_manifest import _reset_cache, query_stack

        _reset_cache()
        manifest_file = _write_manifest(tmp_path)
        try:
            with patch("timmy.stack_manifest._MANIFEST_PATH", manifest_file):
                return query_stack(**kwargs)
        finally:
            _reset_cache()

    def test_filter_by_category_id(self, tmp_path):
        result = self._query(tmp_path, category="llm_inference")
        assert "Ollama" in result
        assert "mlx-lm" in result
        assert "FastMCP" not in result

    def test_filter_by_partial_category(self, tmp_path):
        # "nostr" is a prefix of the "nostr_lightning" category id.
        result = self._query(tmp_path, category="nostr")
        assert "LND" in result
        assert "Ollama" not in result

    def test_filter_by_category_name(self, tmp_path):
        # Matching on the display name, not the id.
        result = self._query(tmp_path, category="Agent Orchestration")
        assert "FastMCP" in result
        assert "Agno" in result

    def test_no_matching_category(self, tmp_path):
        result = self._query(tmp_path, category="quantum_computing")
        assert "No category matching" in result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# query_stack — tool filter
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestQueryStackToolFilter:
    """query_stack(tool=...) filtering."""

    def _query(self, tmp_path, **kwargs):
        """Run query_stack against a fresh mini manifest.

        The cache is reset before the call and again in a ``finally`` —
        previously the trailing reset was skipped if query_stack raised,
        leaking cached state into later tests.
        """
        from timmy.stack_manifest import _reset_cache, query_stack

        _reset_cache()
        manifest_file = _write_manifest(tmp_path)
        try:
            with patch("timmy.stack_manifest._MANIFEST_PATH", manifest_file):
                return query_stack(**kwargs)
        finally:
            _reset_cache()

    def test_filter_by_exact_tool(self, tmp_path):
        result = self._query(tmp_path, tool="Ollama")
        assert "Ollama" in result
        assert "0.18.2" in result
        assert "FastMCP" not in result

    def test_filter_by_partial_tool(self, tmp_path):
        # "mcp" is a substring of "FastMCP".
        result = self._query(tmp_path, tool="mcp")
        assert "FastMCP" in result

    def test_case_insensitive_tool(self, tmp_path):
        result = self._query(tmp_path, tool="ollama")
        assert "Ollama" in result

    def test_no_matching_tool(self, tmp_path):
        result = self._query(tmp_path, tool="nonexistent-tool")
        assert "No tool matching" in result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# query_stack — combined filters
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestQueryStackCombinedFilters:
    """query_stack(category=..., tool=...) combined filtering."""

    def _query(self, tmp_path, **kwargs):
        """Run query_stack against a fresh mini manifest, resetting the
        cache in a ``finally`` so a raising query can't leak cached state."""
        from timmy.stack_manifest import _reset_cache, query_stack

        _reset_cache()
        manifest_file = _write_manifest(tmp_path)
        try:
            with patch("timmy.stack_manifest._MANIFEST_PATH", manifest_file):
                return query_stack(**kwargs)
        finally:
            _reset_cache()

    def test_category_and_tool(self, tmp_path):
        result = self._query(tmp_path, category="nostr", tool="LND")
        assert "LND" in result
        assert "1 tool(s) matched" in result

    def test_category_and_tool_no_match(self, tmp_path):
        # LND exists, but not inside the llm_inference category.
        result = self._query(tmp_path, category="llm_inference", tool="LND")
        assert "No tools found" in result
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# query_stack — error handling
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestQueryStackErrors:
    """Error handling in query_stack: missing, malformed, and empty manifests."""

    def test_missing_manifest(self, tmp_path):
        from timmy.stack_manifest import _reset_cache, query_stack

        _reset_cache()
        absent = tmp_path / "missing.json"
        with patch("timmy.stack_manifest._MANIFEST_PATH", absent):
            message = query_stack()
        _reset_cache()
        assert "not found" in message.lower()

    def test_invalid_manifest(self, tmp_path):
        from timmy.stack_manifest import _reset_cache, query_stack

        _reset_cache()
        malformed = tmp_path / "bad.json"
        malformed.write_text("{broken")
        with patch("timmy.stack_manifest._MANIFEST_PATH", malformed):
            message = query_stack()
        _reset_cache()
        assert "invalid JSON" in message

    def test_empty_manifest(self, tmp_path):
        from timmy.stack_manifest import _reset_cache, query_stack

        _reset_cache()
        # Valid JSON, but no categories at all.
        empty = _write_manifest(tmp_path, {"version": "1.0.0", "categories": []})
        with patch("timmy.stack_manifest._MANIFEST_PATH", empty):
            message = query_stack()
        _reset_cache()
        assert "empty" in message.lower()
|
||||
|
||||
|
||||
# ---------------------------------------------------------------------------
|
||||
# Output format
|
||||
# ---------------------------------------------------------------------------
|
||||
|
||||
|
||||
class TestOutputFormat:
    """Verify output formatting of individual tool entries."""

    def _query_tool(self, tmp_path, name):
        """query_stack(tool=name) against a fresh mini manifest.

        The cache reset runs in a ``finally`` — previously it was skipped
        if query_stack raised, leaking cached state into later tests.
        """
        from timmy.stack_manifest import _reset_cache, query_stack

        _reset_cache()
        manifest_file = _write_manifest(tmp_path)
        try:
            with patch("timmy.stack_manifest._MANIFEST_PATH", manifest_file):
                return query_stack(tool=name)
        finally:
            _reset_cache()

    def test_includes_install_command(self, tmp_path):
        result = self._query_tool(tmp_path, "Ollama")
        assert "Install:" in result
        assert "curl -fsSL" in result

    def test_includes_license(self, tmp_path):
        result = self._query_tool(tmp_path, "Ollama")
        assert "License: MIT" in result

    def test_experimental_status_badge(self, tmp_path):
        # Non-"active" status should render an upper-cased badge.
        result = self._query_tool(tmp_path, "exo-experimental")
        assert "[EXPERIMENTAL]" in result

    def test_includes_role(self, tmp_path):
        result = self._query_tool(tmp_path, "Agno")
        assert "Role:" in result
        assert "Core agent framework" in result
|
||||
1356
tests/timmy/test_thinking_engine.py
Normal file
1356
tests/timmy/test_thinking_engine.py
Normal file
File diff suppressed because it is too large
Load Diff
Reference in New Issue
Block a user