diff --git a/.dockerignore b/.dockerignore index e7b8b11..930540d 100644 --- a/.dockerignore +++ b/.dockerignore @@ -33,5 +33,10 @@ tests/ docs/ *.md +# ── Deploy configs (not needed inside image) ────────────────────────────────── +deploy/ +docker-compose*.yml +Makefile + # ── macOS ───────────────────────────────────────────────────────────────────── .DS_Store diff --git a/.env.example b/.env.example index 866f447..a58439f 100644 --- a/.env.example +++ b/.env.example @@ -1,9 +1,17 @@ # Timmy Time — Mission Control # Copy this file to .env and uncomment lines you want to override. # .env is gitignored and never committed. +# +# For cloud deployment, deploy/setup.sh generates this automatically. + +# ── Cloud / Production ────────────────────────────────────────────────────── +# Your domain for automatic HTTPS via Let's Encrypt. +# Set to your actual domain (e.g., timmy.example.com) for HTTPS. +# Leave as "localhost" for IP-only HTTP access. +# DOMAIN=localhost # Ollama host (default: http://localhost:11434) -# Override if Ollama is running on another machine or port. +# In production (docker-compose.prod.yml), this is set to http://ollama:11434 automatically. # OLLAMA_URL=http://localhost:11434 # LLM model to use via Ollama (default: llama3.2) @@ -33,8 +41,24 @@ # Lightning backend: "mock" (default) | "lnd" # LIGHTNING_BACKEND=mock +# ── Environment & Privacy ─────────────────────────────────────────────────── +# Environment mode: "development" (default) | "production" +# In production, security secrets MUST be set or the app will refuse to start. +# TIMMY_ENV=development + +# Disable Agno telemetry for sovereign/air-gapped deployments. +# Default is false (disabled) to align with local-first AI vision. +# TELEMETRY_ENABLED=false + # ── Telegram bot ────────────────────────────────────────────────────────────── # Bot token from @BotFather on Telegram. # Alternatively, configure via the /telegram/setup dashboard endpoint at runtime. 
# Requires: pip install ".[telegram]" # TELEGRAM_TOKEN= + +# ── Discord bot ────────────────────────────────────────────────────────────── +# Bot token from https://discord.com/developers/applications +# Alternatively, configure via the /discord/setup dashboard endpoint at runtime. +# Requires: pip install ".[discord]" +# Optional: pip install pyzbar Pillow (for QR code invite detection from screenshots) +# DISCORD_TOKEN= diff --git a/.gitignore b/.gitignore index 29e629f..4423510 100644 --- a/.gitignore +++ b/.gitignore @@ -21,8 +21,12 @@ env/ # SQLite memory — never commit agent memory *.db -# Telegram bot state (contains bot token) +# Runtime PID files +.watchdog.pid + +# Chat platform state files (contain bot tokens) telegram_state.json +discord_state.json # Testing .pytest_cache/ @@ -36,5 +40,11 @@ reports/ .vscode/ *.swp *.swo -.DS_Store .claude/ + +# macOS +.DS_Store +.AppleDouble +.LSOverride +.Spotlight-V100 +.Trashes diff --git a/.handoff/CONTINUE.md b/.handoff/CONTINUE.md index 56128b3..53a5e6a 100644 --- a/.handoff/CONTINUE.md +++ b/.handoff/CONTINUE.md @@ -3,7 +3,7 @@ ## Quick Start ```bash -cd /Users/apayne/Timmy-time-dashboard && cat .handoff/CHECKPOINT.md +cd Timmy-time-dashboard && cat .handoff/CHECKPOINT.md ``` Then paste this prompt to Kimi: diff --git a/.handoff/bootstrap.sh b/.handoff/bootstrap.sh index 7d057d7..49c8d64 100755 --- a/.handoff/bootstrap.sh +++ b/.handoff/bootstrap.sh @@ -4,7 +4,7 @@ echo "=== Kimi Handoff Bootstrap ===" echo "" -cd /Users/apayne/Timmy-time-dashboard +cd "$(dirname "$0")/.." echo "📋 Current Checkpoint:" cat .handoff/CHECKPOINT.md | head -30 diff --git a/.handoff/resume.sh b/.handoff/resume.sh index fe5eb44..ede88f9 100755 --- a/.handoff/resume.sh +++ b/.handoff/resume.sh @@ -1,7 +1,7 @@ #!/bin/bash # One-liner to get status and prompt for Kimi -cd /Users/apayne/Timmy-time-dashboard +cd "$(dirname "$0")/.." 
echo "=== STATUS ===" git log --oneline -1 @@ -12,7 +12,7 @@ echo "" echo "=== PROMPT (copy/paste to Kimi) ===" echo "" -echo "cd /Users/apayne/Timmy-time-dashboard && cat .handoff/CHECKPOINT.md" +echo "cd Timmy-time-dashboard && cat .handoff/CHECKPOINT.md" echo "" echo "Continue from checkpoint. Read the file above and execute the NEXT TASK from .handoff/TODO.md. Run 'make test' after changes." echo "" diff --git a/AGENTS.md b/AGENTS.md index 7acbbe8..4838713 100644 --- a/AGENTS.md +++ b/AGENTS.md @@ -35,10 +35,16 @@ src/ swarm/ # Multi-agent coordinator, registry, bidder, tasks, comms docker_runner.py # Spawn agents as Docker containers timmy_serve/ # L402 Lightning proxy, payment handler, TTS, CLI + spark/ # Intelligence engine — events, predictions, advisory + creative/ # Creative director + video assembler pipeline + tools/ # Git, image, music, video tools for persona agents + lightning/ # Lightning backend abstraction (mock + LND) + agent_core/ # Substrate-agnostic agent interface voice/ # NLU intent detection (regex-based, no cloud) - websocket/ # WebSocket manager (ws_manager singleton) + ws_manager/ # WebSocket manager (ws_manager singleton) notifications/ # Push notification store (notifier singleton) shortcuts/ # Siri Shortcuts API endpoints + telegram_bot/ # Telegram bridge self_tdd/ # Continuous test watchdog tests/ # One test_*.py per module, all mocked static/ # style.css + bg.svg (arcane theme) @@ -250,7 +256,7 @@ runner.stop(info["container_id"]) ```python from dashboard.store import message_log from notifications.push import notifier -from websocket.handler import ws_manager +from ws_manager.handler import ws_manager from timmy_serve.payment_handler import payment_handler from swarm.coordinator import coordinator ``` @@ -309,9 +315,9 @@ make docker-agent # add a Local agent worker **v2.0.0 — Exodus (in progress)** - [x] Persistent swarm state across restarts - [x] Docker infrastructure for agent containers -- [ ] Implement Echo, Mace, Helm, Seer, 
Forge, Quill persona agents (Dockerised) +- [x] Implement Echo, Mace, Helm, Seer, Forge, Quill persona agents (+ Pixel, Lyra, Reel) +- [x] MCP tool integration for Timmy - [ ] Real LND gRPC backend for `PaymentHandler` (replace mock) -- [ ] MCP tool integration for Timmy - [ ] Marketplace frontend — wire `/marketplace` route to real data **v3.0.0 — Revelation (planned)** diff --git a/CLAUDE.md b/CLAUDE.md new file mode 100644 index 0000000..ae0f42e --- /dev/null +++ b/CLAUDE.md @@ -0,0 +1,267 @@ +# CLAUDE.md — AI Assistant Guide for Timmy Time + +This file provides context for AI assistants (Claude Code, Copilot, etc.) +working in this repository. Read this before making any changes. + +For multi-agent development standards and agent-specific conventions, see +[`AGENTS.md`](AGENTS.md). + +--- + +## Project Summary + +**Timmy Time** is a local-first, sovereign AI agent system with a browser-based +Mission Control dashboard. No cloud AI — all inference runs on localhost via +Ollama (or AirLLM for large models). Bitcoin Lightning economics are built in +for API access gating. 
+ +**Tech stack:** Python 3.11+ · FastAPI · Jinja2 + HTMX · SQLite · Agno (agent +framework) · Ollama · pydantic-settings · WebSockets · Docker + +--- + +## Quick Reference Commands + +```bash +# Setup +make install # Create venv + install dev deps +cp .env.example .env # Configure environment + +# Development +make dev # Start dashboard at http://localhost:8000 +make test # Run full test suite (no Ollama needed) +make test-cov # Tests + coverage report (terminal + XML) +make lint # Run ruff or flake8 + +# Docker +make docker-build # Build timmy-time:latest image +make docker-up # Start dashboard container +make docker-agent # Spawn one agent worker +make docker-down # Stop all containers +``` + +--- + +## Project Layout + +``` +src/ + config.py # Central pydantic-settings (all env vars) + timmy/ # Core agent: agent.py, backends.py, cli.py, prompts.py + dashboard/ # FastAPI app + routes + Jinja2 templates + app.py # App factory, lifespan, router registration + store.py # In-memory MessageLog singleton + routes/ # One file per route group (agents, health, swarm, etc.) 
+ templates/ # base.html + page templates + partials/ + swarm/ # Multi-agent coordinator, registry, bidder, tasks, comms + coordinator.py # Central swarm orchestrator (security-sensitive) + docker_runner.py # Spawn agents as Docker containers + timmy_serve/ # L402 Lightning proxy, payment handler, TTS, CLI + spark/ # Intelligence engine — events, predictions, advisory + creative/ # Creative director + video assembler pipeline + tools/ # Git, image, music, video tools for persona agents + lightning/ # Lightning backend abstraction (mock + LND) + agent_core/ # Substrate-agnostic agent interface + voice/ # NLU intent detection (regex-based, local) + ws_manager/ # WebSocket connection manager (ws_manager singleton) + notifications/ # Push notification store (notifier singleton) + shortcuts/ # Siri Shortcuts API endpoints + telegram_bot/ # Telegram bridge + self_tdd/ # Continuous test watchdog +tests/ # One test_*.py per module, all mocked +static/ # style.css + bg.svg (dark arcane theme) +docs/ # GitHub Pages landing site +``` + +--- + +## Architecture Patterns + +### Config access + +All configuration goes through `src/config.py` using pydantic-settings: + +```python +from config import settings +url = settings.ollama_url # never use os.environ.get() directly in app code +``` + +Environment variables are read from `.env` automatically. See `.env.example` for +all available settings. 
+ +### Singletons + +Core services are module-level singleton instances imported directly: + +```python +from dashboard.store import message_log +from notifications.push import notifier +from ws_manager.handler import ws_manager +from timmy_serve.payment_handler import payment_handler +from swarm.coordinator import coordinator +``` + +### HTMX response pattern + +Routes return Jinja2 template partials for HTMX requests: + +```python +return templates.TemplateResponse( + "partials/chat_message.html", + {"request": request, "role": "user", "content": message} +) +``` + +### Graceful degradation + +Optional services (Ollama, Redis, AirLLM) degrade gracefully — log the error, +return a fallback, never crash: + +```python +try: + result = await some_optional_service() +except Exception: + result = fallback_value +``` + +### Route registration + +New routes go in `src/dashboard/routes/<name>.py`, then register the router in +`src/dashboard/app.py`: + +```python +from dashboard.routes.<name> import router as <name>_router +app.include_router(<name>_router) +``` + +--- + +## Testing + +### Running tests + +```bash +make test # Quick run (pytest -q --tb=short) +make test-cov # With coverage (term-missing + XML) +make test-cov-html # With HTML coverage report +``` + +No Ollama or external services needed — all heavy dependencies are mocked. 
+ +### Test conventions + +- **One test file per module:** `tests/test_.py` +- **Stubs in conftest:** `agno`, `airllm`, `pyttsx3`, `telegram` are stubbed in + `tests/conftest.py` using `sys.modules.setdefault()` so tests run without + those packages installed +- **Test mode:** `TIMMY_TEST_MODE=1` is set automatically in conftest to disable + auto-spawning of persona agents during tests +- **FastAPI testing:** Use the `client` fixture (wraps `TestClient`) +- **Database isolation:** SQLite files in `data/` are cleaned between tests; + coordinator state is reset via autouse fixtures +- **Async:** `asyncio_mode = "auto"` in pytest config — async test functions + are detected automatically +- **Coverage threshold:** CI fails if coverage drops below 60% + (`fail_under = 60` in `pyproject.toml`) + +### Adding a new test + +```python +# tests/test_my_feature.py +from fastapi.testclient import TestClient + +def test_my_endpoint(client): + response = client.get("/my-endpoint") + assert response.status_code == 200 +``` + +--- + +## CI/CD + +GitHub Actions workflow (`.github/workflows/tests.yml`): + +- Runs on every push and pull request to all branches +- Python 3.11, installs `.[dev]` dependencies +- Runs pytest with coverage + JUnit XML output +- Publishes test results as PR comments and check annotations +- Uploads coverage XML as a downloadable artifact (14-day retention) + +--- + +## Key Conventions + +1. **Tests must stay green.** Run `make test` before committing. +2. **No cloud AI dependencies.** All inference runs on localhost. +3. **No new top-level files without purpose.** Keep the root directory clean. +4. **Follow existing patterns** — singletons, graceful degradation, + pydantic-settings config. +5. **Security defaults:** Never hard-code secrets. Warn at startup when using + default values. +6. **XSS prevention:** Never use `innerHTML` with untrusted content. +7. **Keep routes thin** — business logic lives in the module, not the route. +8. 
**Prefer editing existing files** over creating new ones. +9. **Use `from config import settings`** for all env-var access. +10. **Every new module gets a test:** `tests/test_.py`. + +--- + +## Entry Points + +Three CLI commands are installed via `pyproject.toml`: + +| Command | Module | Purpose | +|---------|--------|---------| +| `timmy` | `src/timmy/cli.py` | Chat, think, status commands | +| `timmy-serve` | `src/timmy_serve/cli.py` | L402-gated API server (port 8402) | +| `self-tdd` | `src/self_tdd/watchdog.py` | Continuous test watchdog | + +--- + +## Environment Variables + +Key variables (full list in `.env.example`): + +| Variable | Default | Purpose | +|----------|---------|---------| +| `OLLAMA_URL` | `http://localhost:11434` | Ollama host | +| `OLLAMA_MODEL` | `llama3.2` | Model served by Ollama | +| `DEBUG` | `false` | Enable `/docs` and `/redoc` | +| `TIMMY_MODEL_BACKEND` | `ollama` | `ollama` / `airllm` / `auto` | +| `AIRLLM_MODEL_SIZE` | `70b` | `8b` / `70b` / `405b` | +| `L402_HMAC_SECRET` | *(change in prod)* | HMAC signing for invoices | +| `L402_MACAROON_SECRET` | *(change in prod)* | Macaroon signing | +| `LIGHTNING_BACKEND` | `mock` | `mock` / `lnd` | +| `SPARK_ENABLED` | `true` | Enable Spark intelligence engine | +| `TELEGRAM_TOKEN` | *(empty)* | Telegram bot token | + +--- + +## Persistence + +- `timmy.db` — Agno agent memory (SQLite, project root) +- `data/swarm.db` — Swarm registry + tasks (SQLite, `data/` directory) +- All `.db` files are gitignored — never commit database files + +--- + +## Docker + +Containers share a `data/` volume for SQLite. Container agents communicate with +the coordinator over HTTP (not in-memory `SwarmComms`): + +``` +GET /internal/tasks → list tasks open for bidding +POST /internal/bids → submit a bid +``` + +`COORDINATOR_URL=http://dashboard:8000` is set automatically by docker-compose. 
+ +--- + +## Security-Sensitive Areas + +- `src/swarm/coordinator.py` — requires review before changes +- `src/timmy_serve/l402_proxy.py` — Lightning payment gating +- `src/lightning/` — payment backend abstraction +- Any file handling secrets or authentication tokens diff --git a/Dockerfile b/Dockerfile index 1a61121..6ac6daa 100644 --- a/Dockerfile +++ b/Dockerfile @@ -11,42 +11,43 @@ # timmy-time:latest \ # python -m swarm.agent_runner --agent-id w1 --name Worker-1 -FROM python:3.12-slim +FROM python:3.12-slim AS base # ── System deps ────────────────────────────────────────────────────────────── RUN apt-get update && apt-get install -y --no-install-recommends \ - gcc curl \ + gcc curl fonts-dejavu-core \ && rm -rf /var/lib/apt/lists/* WORKDIR /app # ── Python deps (install before copying src for layer caching) ─────────────── +# Copy only pyproject.toml first so Docker can cache the dep-install layer. +# The editable install (-e) happens after src is copied below. COPY pyproject.toml . -# Install production deps only (no dev/test extras in the image) -RUN pip install --no-cache-dir \ - "fastapi>=0.115.0" \ - "uvicorn[standard]>=0.32.0" \ - "jinja2>=3.1.0" \ - "httpx>=0.27.0" \ - "python-multipart>=0.0.12" \ - "aiofiles>=24.0.0" \ - "typer>=0.12.0" \ - "rich>=13.0.0" \ - "pydantic-settings>=2.0.0" \ - "websockets>=12.0" \ - "agno[sqlite]>=1.4.0" \ - "ollama>=0.3.0" \ - "openai>=1.0.0" \ - "python-telegram-bot>=21.0" +# Create a minimal src layout so `pip install` can resolve the package metadata +# without copying the full source tree (preserves Docker layer caching). 
+RUN mkdir -p src/timmy src/timmy_serve src/self_tdd src/dashboard && \ + touch src/timmy/__init__.py src/timmy/cli.py \ + src/timmy_serve/__init__.py src/timmy_serve/cli.py \ + src/self_tdd/__init__.py src/self_tdd/watchdog.py \ + src/dashboard/__init__.py src/config.py + +RUN pip install --no-cache-dir -e ".[swarm,telegram]" # ── Application source ─────────────────────────────────────────────────────── +# Overwrite the stubs with real source code COPY src/ ./src/ COPY static/ ./static/ # Create data directory (mounted as a volume in production) RUN mkdir -p /app/data +# ── Non-root user for production ───────────────────────────────────────────── +RUN groupadd -r timmy && useradd -r -g timmy -d /app -s /sbin/nologin timmy \ + && chown -R timmy:timmy /app +USER timmy + # ── Environment ────────────────────────────────────────────────────────────── ENV PYTHONPATH=/app/src ENV PYTHONUNBUFFERED=1 @@ -54,5 +55,9 @@ ENV PYTHONDONTWRITEBYTECODE=1 EXPOSE 8000 +# ── Healthcheck ────────────────────────────────────────────────────────────── +HEALTHCHECK --interval=30s --timeout=5s --start-period=15s --retries=3 \ + CMD curl -f http://localhost:8000/health || exit 1 + # ── Default: run the dashboard ─────────────────────────────────────────────── CMD ["uvicorn", "dashboard.app:app", "--host", "0.0.0.0", "--port", "8000"] diff --git a/LICENSE b/LICENSE index 261eeb9..16e48f0 100644 --- a/LICENSE +++ b/LICENSE @@ -1,201 +1,21 @@ - Apache License - Version 2.0, January 2004 - http://www.apache.org/licenses/ +MIT License - TERMS AND CONDITIONS FOR USE, REPRODUCTION, AND DISTRIBUTION +Copyright (c) 2026 Alexander Whitestone - 1. Definitions. 
+Permission is hereby granted, free of charge, to any person obtaining a copy +of this software and associated documentation files (the "Software"), to deal +in the Software without restriction, including without limitation the rights +to use, copy, modify, merge, publish, distribute, sublicense, and/or sell +copies of the Software, and to permit persons to whom the Software is +furnished to do so, subject to the following conditions: - "License" shall mean the terms and conditions for use, reproduction, - and distribution as defined by Sections 1 through 9 of this document. +The above copyright notice and this permission notice shall be included in all +copies or substantial portions of the Software. - "Licensor" shall mean the copyright owner or entity authorized by - the copyright owner that is granting the License. - - "Legal Entity" shall mean the union of the acting entity and all - other entities that control, are controlled by, or are under common - control with that entity. For the purposes of this definition, - "control" means (i) the power, direct or indirect, to cause the - direction or management of such entity, whether by contract or - otherwise, or (ii) ownership of fifty percent (50%) or more of the - outstanding shares, or (iii) beneficial ownership of such entity. - - "You" (or "Your") shall mean an individual or Legal Entity - exercising permissions granted by this License. - - "Source" form shall mean the preferred form for making modifications, - including but not limited to software source code, documentation - source, and configuration files. - - "Object" form shall mean any form resulting from mechanical - transformation or translation of a Source form, including but - not limited to compiled object code, generated documentation, - and conversions to other media types. 
- - "Work" shall mean the work of authorship, whether in Source or - Object form, made available under the License, as indicated by a - copyright notice that is included in or attached to the work - (an example is provided in the Appendix below). - - "Derivative Works" shall mean any work, whether in Source or Object - form, that is based on (or derived from) the Work and for which the - editorial revisions, annotations, elaborations, or other modifications - represent, as a whole, an original work of authorship. For the purposes - of this License, Derivative Works shall not include works that remain - separable from, or merely link (or bind by name) to the interfaces of, - the Work and Derivative Works thereof. - - "Contribution" shall mean any work of authorship, including - the original version of the Work and any modifications or additions - to that Work or Derivative Works thereof, that is intentionally - submitted to Licensor for inclusion in the Work by the copyright owner - or by an individual or Legal Entity authorized to submit on behalf of - the copyright owner. For the purposes of this definition, "submitted" - means any form of electronic, verbal, or written communication sent - to the Licensor or its representatives, including but not limited to - communication on electronic mailing lists, source code control systems, - and issue tracking systems that are managed by, or on behalf of, the - Licensor for the purpose of discussing and improving the Work, but - excluding communication that is conspicuously marked or otherwise - designated in writing by the copyright owner as "Not a Contribution." - - "Contributor" shall mean Licensor and any individual or Legal Entity - on behalf of whom a Contribution has been received by Licensor and - subsequently incorporated within the Work. - - 2. Grant of Copyright License. 
Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - copyright license to reproduce, prepare Derivative Works of, - publicly display, publicly perform, sublicense, and distribute the - Work and such Derivative Works in Source or Object form. - - 3. Grant of Patent License. Subject to the terms and conditions of - this License, each Contributor hereby grants to You a perpetual, - worldwide, non-exclusive, no-charge, royalty-free, irrevocable - (except as stated in this section) patent license to make, have made, - use, offer to sell, sell, import, and otherwise transfer the Work, - where such license applies only to those patent claims licensable - by such Contributor that are necessarily infringed by their - Contribution(s) alone or by combination of their Contribution(s) - with the Work to which such Contribution(s) was submitted. If You - institute patent litigation against any entity (including a - cross-claim or counterclaim in a lawsuit) alleging that the Work - or a Contribution incorporated within the Work constitutes direct - or contributory patent infringement, then any patent licenses - granted to You under this License for that Work shall terminate - as of the date such litigation is filed. - - 4. Redistribution. 
You may reproduce and distribute copies of the - Work or Derivative Works thereof in any medium, with or without - modifications, and in Source or Object form, provided that You - meet the following conditions: - - (a) You must give any other recipients of the Work or - Derivative Works a copy of this License; and - - (b) You must cause any modified files to carry prominent notices - stating that You changed the files; and - - (c) You must retain, in the Source form of any Derivative Works - that You distribute, all copyright, patent, trademark, and - attribution notices from the Source form of the Work, - excluding those notices that do not pertain to any part of - the Derivative Works; and - - (d) If the Work includes a "NOTICE" text file as part of its - distribution, then any Derivative Works that You distribute must - include a readable copy of the attribution notices contained - within such NOTICE file, excluding those notices that do not - pertain to any part of the Derivative Works, in at least one - of the following places: within a NOTICE text file distributed - as part of the Derivative Works; within the Source form or - documentation, if provided along with the Derivative Works; or, - within a display generated by the Derivative Works, if and - wherever such third-party notices normally appear. The contents - of the NOTICE file are for informational purposes only and - do not modify the License. You may add Your own attribution - notices within Derivative Works that You distribute, alongside - or as an addendum to the NOTICE text from the Work, provided - that such additional attribution notices cannot be construed - as modifying the License. 
- - You may add Your own copyright statement to Your modifications and - may provide additional or different license terms and conditions - for use, reproduction, or distribution of Your modifications, or - for any such Derivative Works as a whole, provided Your use, - reproduction, and distribution of the Work otherwise complies with - the conditions stated in this License. - - 5. Submission of Contributions. Unless You explicitly state otherwise, - any Contribution intentionally submitted for inclusion in the Work - by You to the Licensor shall be under the terms and conditions of - this License, without any additional terms or conditions. - Notwithstanding the above, nothing herein shall supersede or modify - the terms of any separate license agreement you may have executed - with Licensor regarding such Contributions. - - 6. Trademarks. This License does not grant permission to use the trade - names, trademarks, service marks, or product names of the Licensor, - except as required for reasonable and customary use in describing the - origin of the Work and reproducing the content of the NOTICE file. - - 7. Disclaimer of Warranty. Unless required by applicable law or - agreed to in writing, Licensor provides the Work (and each - Contributor provides its Contributions) on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or - implied, including, without limitation, any warranties or conditions - of TITLE, NON-INFRINGEMENT, MERCHANTABILITY, or FITNESS FOR A - PARTICULAR PURPOSE. You are solely responsible for determining the - appropriateness of using or redistributing the Work and assume any - risks associated with Your exercise of permissions under this License. - - 8. Limitation of Liability. 
In no event and under no legal theory, - whether in tort (including negligence), contract, or otherwise, - unless required by applicable law (such as deliberate and grossly - negligent acts) or agreed to in writing, shall any Contributor be - liable to You for damages, including any direct, indirect, special, - incidental, or consequential damages of any character arising as a - result of this License or out of the use or inability to use the - Work (including but not limited to damages for loss of goodwill, - work stoppage, computer failure or malfunction, or any and all - other commercial damages or losses), even if such Contributor - has been advised of the possibility of such damages. - - 9. Accepting Warranty or Additional Liability. While redistributing - the Work or Derivative Works thereof, You may choose to offer, - and charge a fee for, acceptance of support, warranty, indemnity, - or other liability obligations and/or rights consistent with this - License. However, in accepting such obligations, You may act only - on Your own behalf and on Your sole responsibility, not on behalf - of any other Contributor, and only if You agree to indemnify, - defend, and hold each Contributor harmless for any liability - incurred by, or claims asserted against, such Contributor by reason - of your accepting any such warranty or additional liability. - - END OF TERMS AND CONDITIONS - - APPENDIX: How to apply the Apache License to your work. - - To apply the Apache License to your work, attach the following - boilerplate notice, with the fields enclosed by brackets "[]" - replaced with your own identifying information. (Don't include - the brackets!) The text should be enclosed in the appropriate - comment syntax for the file format. We also recommend that a - file or class name and description of purpose be included on the - same "printed page" as the copyright notice for easier - identification within third-party archives. 
- - Copyright [yyyy] [name of copyright owner] - - Licensed under the Apache License, Version 2.0 (the "License"); - you may not use this file except in compliance with the License. - You may obtain a copy of the License at - - http://www.apache.org/licenses/LICENSE-2.0 - - Unless required by applicable law or agreed to in writing, software - distributed under the License is distributed on an "AS IS" BASIS, - WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied. - See the License for the specific language governing permissions and - limitations under the License. +THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR +IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, +FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE +AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER +LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, +OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE +SOFTWARE. 
diff --git a/Makefile b/Makefile index 6ed068c..5f6a2f9 100644 --- a/Makefile +++ b/Makefile @@ -1,5 +1,7 @@ -.PHONY: install install-bigbrain dev test test-cov test-cov-html watch lint clean help \ - docker-build docker-up docker-down docker-agent docker-logs docker-shell +.PHONY: install install-bigbrain install-creative dev nuke test test-cov test-cov-html watch lint clean help \ + up down logs \ + docker-build docker-up docker-down docker-agent docker-logs docker-shell \ + cloud-deploy cloud-up cloud-down cloud-logs cloud-status cloud-update VENV := .venv PYTHON := $(VENV)/bin/python @@ -23,13 +25,39 @@ install-bigbrain: $(VENV)/bin/activate echo "✓ AirLLM installed (PyTorch backend)"; \ fi +install-creative: $(VENV)/bin/activate + $(PIP) install --quiet -e ".[dev,creative]" + @if [ "$$(uname -m)" = "arm64" ] && [ "$$(uname -s)" = "Darwin" ]; then \ + echo " Apple Silicon detected — installing PyTorch with Metal (MPS) support..."; \ + $(PIP) install --quiet --pre torch torchvision torchaudio \ + --index-url https://download.pytorch.org/whl/nightly/cpu; \ + echo "✓ Creative extras installed with Metal GPU acceleration"; \ + else \ + echo "✓ Creative extras installed (diffusers, torch, ace-step)"; \ + fi + $(VENV)/bin/activate: python3 -m venv $(VENV) # ── Development ─────────────────────────────────────────────────────────────── -dev: - $(UVICORN) dashboard.app:app --reload --host 0.0.0.0 --port 8000 +dev: nuke + PYTHONDONTWRITEBYTECODE=1 $(UVICORN) dashboard.app:app --reload --host 0.0.0.0 --port 8000 + +# Kill anything on port 8000, stop Docker containers, clear stale state. +# Safe to run anytime — idempotent, never errors out. +nuke: + @echo " Cleaning up dev environment..." 
+ @# Stop Docker containers (if any are running) + @docker compose down --remove-orphans 2>/dev/null || true + @# Kill any process holding port 8000 (errno 48 fix) + @lsof -ti :8000 | xargs kill -9 2>/dev/null || true + @# Purge stale bytecache to prevent loading old .pyc files + @find . -type d -name __pycache__ -exec rm -rf {} + 2>/dev/null || true + @find . -name "*.pyc" -delete 2>/dev/null || true + @# Brief pause to let the OS release the socket + @sleep 0.5 + @echo " ✓ Port 8000 free, containers stopped, caches cleared" # Print the local IP addresses your phone can use to reach this machine. # Connect your phone to the same hotspot your Mac is sharing from, @@ -40,10 +68,15 @@ ip: @echo "" @echo " Open one of these on your phone: http://:8000" @echo "" - @ipconfig getifaddr en0 2>/dev/null | awk '{print " en0 (Wi-Fi): http://" $$1 ":8000"}' || true - @ipconfig getifaddr en1 2>/dev/null | awk '{print " en1 (Ethernet): http://" $$1 ":8000"}' || true - @ipconfig getifaddr en2 2>/dev/null | awk '{print " en2: http://" $$1 ":8000"}' || true - @ifconfig 2>/dev/null | awk '/inet / && !/127\.0\.0\.1/ && !/::1/{print " " $$2 " → http://" $$2 ":8000"}' | head -5 || true + @if [ "$$(uname -s)" = "Darwin" ]; then \ + ipconfig getifaddr en0 2>/dev/null | awk '{print " en0 (Wi-Fi): http://" $$1 ":8000"}' || true; \ + ipconfig getifaddr en1 2>/dev/null | awk '{print " en1 (Ethernet): http://" $$1 ":8000"}' || true; \ + ipconfig getifaddr en2 2>/dev/null | awk '{print " en2: http://" $$1 ":8000"}' || true; \ + fi + @# Generic fallback — works on both macOS and Linux + @ifconfig 2>/dev/null | awk '/inet / && !/127\.0\.0\.1/ && !/::1/{print " " $$2 " → http://" $$2 ":8000"}' | head -5 \ + || ip -4 addr show 2>/dev/null | awk '/inet / && !/127\.0\.0\.1/{split($$2,a,"/"); print " " a[1] " → http://" a[1] ":8000"}' | head -5 \ + || true @echo "" watch: @@ -61,6 +94,12 @@ test-cov-html: $(PYTEST) tests/ --cov=src --cov-report=term-missing --cov-report=html -q @echo "✓ HTML 
coverage report: open htmlcov/index.html" +# Full-stack functional test: spins up Ollama (CPU, qwen2.5:0.5b) + dashboard +# in Docker and verifies real LLM chat end-to-end. +# Override model: make test-ollama OLLAMA_TEST_MODEL=tinyllama +test-ollama: + FUNCTIONAL_DOCKER=1 $(PYTEST) tests/functional/test_ollama_chat.py -v --tb=long -x + # ── Code quality ────────────────────────────────────────────────────────────── lint: @@ -70,6 +109,33 @@ lint: # ── Housekeeping ────────────────────────────────────────────────────────────── +# ── One-command startup ────────────────────────────────────────────────────── +# make up build + start everything in Docker +# make up DEV=1 same, with hot-reload on Python/template/CSS changes + +up: + mkdir -p data +ifdef DEV + docker compose -f docker-compose.yml -f docker-compose.dev.yml up -d --build + @echo "" + @echo " ✓ Timmy Time running in DEV mode at http://localhost:8000" + @echo " Hot-reload active — Python, template, and CSS changes auto-apply" + @echo " Logs: make logs" + @echo "" +else + docker compose up -d --build + @echo "" + @echo " ✓ Timmy Time running at http://localhost:8000" + @echo " Logs: make logs" + @echo "" +endif + +down: + docker compose down + +logs: + docker compose logs -f + # ── Docker ──────────────────────────────────────────────────────────────────── docker-build: @@ -95,6 +161,45 @@ docker-logs: docker-shell: docker compose exec dashboard bash +# ── Cloud Deploy ───────────────────────────────────────────────────────────── + +# One-click production deployment (run on your cloud server) +cloud-deploy: + @bash deploy/setup.sh + +# Start the production stack (Caddy + Ollama + Dashboard + Timmy) +cloud-up: + docker compose -f docker-compose.prod.yml up -d + +# Stop the production stack +cloud-down: + docker compose -f docker-compose.prod.yml down + +# Tail production logs +cloud-logs: + docker compose -f docker-compose.prod.yml logs -f + +# Show status of all production containers +cloud-status: + docker 
compose -f docker-compose.prod.yml ps + +# Pull latest code and rebuild +cloud-update: + git pull + docker compose -f docker-compose.prod.yml up -d --build + +# Create a DigitalOcean droplet (requires doctl CLI) +cloud-droplet: + @bash deploy/digitalocean/create-droplet.sh + +# Scale agent workers in production: make cloud-scale N=4 +cloud-scale: + docker compose -f docker-compose.prod.yml --profile agents up -d --scale agent=$${N:-2} + +# Pull a model into Ollama: make cloud-pull-model MODEL=llama3.2 +cloud-pull-model: + docker exec timmy-ollama ollama pull $${MODEL:-llama3.2} + # ── Housekeeping ────────────────────────────────────────────────────────────── clean: @@ -105,9 +210,20 @@ clean: help: @echo "" + @echo " Quick Start" + @echo " ─────────────────────────────────────────────────" + @echo " make up build + start everything in Docker" + @echo " make up DEV=1 same, with hot-reload on file changes" + @echo " make down stop all containers" + @echo " make logs tail container logs" + @echo "" + @echo " Local Development" + @echo " ─────────────────────────────────────────────────" @echo " make install create venv + install dev deps" @echo " make install-bigbrain install with AirLLM (big-model backend)" - @echo " make dev start dashboard at http://localhost:8000" + @echo " make install-creative install with creative extras (torch, diffusers)" + @echo " make dev clean up + start dashboard (auto-fixes errno 48)" + @echo " make nuke kill port 8000, stop containers, reset state" @echo " make ip print local IP addresses for phone testing" @echo " make test run all tests" @echo " make test-cov tests + coverage report (terminal + XML)" @@ -116,6 +232,8 @@ help: @echo " make lint run ruff or flake8" @echo " make clean remove build artefacts and caches" @echo "" + @echo " Docker (Advanced)" + @echo " ─────────────────────────────────────────────────" @echo " make docker-build build the timmy-time:latest image" @echo " make docker-up start dashboard container" @echo " 
make docker-agent add one agent worker (AGENT_NAME=Echo)" @@ -123,3 +241,15 @@ help: @echo " make docker-logs tail container logs" @echo " make docker-shell open a bash shell in the dashboard container" @echo "" + @echo " Cloud Deploy (Production)" + @echo " ─────────────────────────────────────────────────" + @echo " make cloud-deploy one-click server setup (run as root)" + @echo " make cloud-up start production stack" + @echo " make cloud-down stop production stack" + @echo " make cloud-logs tail production logs" + @echo " make cloud-status show container status" + @echo " make cloud-update pull + rebuild from git" + @echo " make cloud-droplet create DigitalOcean droplet (needs doctl)" + @echo " make cloud-scale N=4 scale agent workers" + @echo " make cloud-pull-model MODEL=llama3.2 pull LLM model" + @echo "" diff --git a/PLAN.md b/PLAN.md new file mode 100644 index 0000000..a54ab1b --- /dev/null +++ b/PLAN.md @@ -0,0 +1,478 @@ +# Plan: Full Creative & DevOps Capabilities for Timmy + +## Overview + +Add five major capability domains to Timmy's agent system, turning it into a +sovereign creative studio and full-stack DevOps operator. All tools are +open-source, self-hosted, and GPU-accelerated where needed. + +--- + +## Phase 1: Git & DevOps Tools (Forge + Helm personas) + +**Goal:** Timmy can observe local/remote repos, read code, create branches, +stage changes, commit, diff, log, and manage PRs — all through the swarm +task system with Spark event capture. 
+ +### New module: `src/tools/git_tools.py` + +Tools to add (using **GitPython** — BSD-3, `pip install GitPython`): + +| Tool | Function | Persona Access | +|---|---|---| +| `git_clone` | Clone a remote repo to local path | Forge, Helm | +| `git_status` | Show working tree status | Forge, Helm, Timmy | +| `git_diff` | Show staged/unstaged diffs | Forge, Helm, Timmy | +| `git_log` | Show recent commit history | Forge, Helm, Echo, Timmy | +| `git_branch` | List/create/switch branches | Forge, Helm | +| `git_add` | Stage files for commit | Forge, Helm | +| `git_commit` | Create a commit with message | Forge, Helm | +| `git_push` | Push to remote | Forge, Helm | +| `git_pull` | Pull from remote | Forge, Helm | +| `git_blame` | Show line-by-line authorship | Forge, Echo | +| `git_stash` | Stash/pop changes | Forge, Helm | + +### Changes to existing files + +- **`src/timmy/tools.py`** — Add `create_git_tools()` factory, wire into + `PERSONA_TOOLKITS` for Forge and Helm +- **`src/swarm/tool_executor.py`** — Enhance `_infer_tools_needed()` with + git keywords (commit, branch, push, pull, diff, clone, merge) +- **`src/config.py`** — Add `git_default_repo_dir: str = "~/repos"` setting +- **`src/spark/engine.py`** — Add `on_tool_executed()` method to capture + individual tool invocations (not just task-level events) +- **`src/swarm/personas.py`** — Add git-related keywords to Forge and Helm + preferred_keywords + +### New dependency + +```toml +# pyproject.toml +dependencies = [ + ..., + "GitPython>=3.1.40", +] +``` + +### Dashboard + +- **`/tools`** page updated to show git tools in the catalog +- Git tool usage stats visible per agent + +### Tests + +- `tests/test_git_tools.py` — test all git tool functions against tmp repos +- Mock GitPython's `Repo` class for unit tests + +--- + +## Phase 2: Image Generation (new "Pixel" persona) + +**Goal:** Generate storyboard frames and standalone images from text prompts +using FLUX.2 Klein 4B locally. 
+ +### New persona: Pixel — Visual Architect + +```python +"pixel": { + "id": "pixel", + "name": "Pixel", + "role": "Visual Architect", + "description": "Image generation, storyboard frames, and visual design.", + "capabilities": "image-generation,storyboard,design", + "rate_sats": 80, + "bid_base": 60, + "bid_jitter": 20, + "preferred_keywords": [ + "image", "picture", "photo", "draw", "illustration", + "storyboard", "frame", "visual", "design", "generate", + "portrait", "landscape", "scene", "artwork", + ], +} +``` + +### New module: `src/tools/image_tools.py` + +Tools (using **diffusers** + **FLUX.2 Klein 4B** — Apache 2.0): + +| Tool | Function | +|---|---| +| `generate_image` | Text-to-image generation (returns file path) | +| `generate_storyboard` | Generate N frames from scene descriptions | +| `image_variations` | Generate variations of an existing image | + +### Architecture + +``` +generate_image(prompt, width=1024, height=1024, steps=4) + → loads FLUX.2 Klein via diffusers FluxPipeline + → saves to data/images/{uuid}.png + → returns path + metadata +``` + +- Model loaded lazily on first use, kept in memory for subsequent calls +- Falls back to CPU generation (slower) if no GPU +- Output saved to `data/images/` with metadata JSON sidecar + +### New dependency (optional extra) + +```toml +[project.optional-dependencies] +creative = [ + "diffusers>=0.30.0", + "transformers>=4.40.0", + "accelerate>=0.30.0", + "torch>=2.2.0", + "safetensors>=0.4.0", +] +``` + +### Config + +```python +# config.py additions +flux_model_id: str = "black-forest-labs/FLUX.2-klein-4b" +image_output_dir: str = "data/images" +image_default_steps: int = 4 +``` + +### Dashboard + +- `/creative/ui` — new Creative Studio page (image gallery + generation form) +- HTMX-powered: submit prompt, poll for result, display inline +- Gallery view of all generated images with metadata + +### Tests + +- `tests/test_image_tools.py` — mock diffusers pipeline, test prompt handling, + file output, 
storyboard generation + +--- + +## Phase 3: Music Generation (new "Lyra" persona) + +**Goal:** Generate full songs with vocals, instrumentals, and lyrics using +ACE-Step 1.5 locally. + +### New persona: Lyra — Sound Weaver + +```python +"lyra": { + "id": "lyra", + "name": "Lyra", + "role": "Sound Weaver", + "description": "Music and song generation with vocals, instrumentals, and lyrics.", + "capabilities": "music-generation,vocals,composition", + "rate_sats": 90, + "bid_base": 70, + "bid_jitter": 20, + "preferred_keywords": [ + "music", "song", "sing", "vocal", "instrumental", + "melody", "beat", "track", "compose", "lyrics", + "audio", "sound", "album", "remix", + ], +} +``` + +### New module: `src/tools/music_tools.py` + +Tools (using **ACE-Step 1.5** — Apache 2.0, `pip install ace-step`): + +| Tool | Function | +|---|---| +| `generate_song` | Text/lyrics → full song (vocals + instrumentals) | +| `generate_instrumental` | Text prompt → instrumental track | +| `generate_vocals` | Lyrics + style → vocal track | +| `list_genres` | Return supported genre/style tags | + +### Architecture + +``` +generate_song(lyrics, genre="pop", duration=120, language="en") + → loads ACE-Step model (lazy, cached) + → generates audio + → saves to data/music/{uuid}.wav + → returns path + metadata (duration, genre, etc.) +``` + +- Model loaded lazily, ~4GB VRAM minimum +- Output saved to `data/music/` with metadata sidecar +- Supports 19 languages, genre tags, tempo control + +### New dependency (optional extra, extends `creative`) + +```toml +[project.optional-dependencies] +creative = [ + ..., + "ace-step>=1.5.0", +] +``` + +### Config + +```python +music_output_dir: str = "data/music" +ace_step_model: str = "ace-step/ACE-Step-v1.5" +``` + +### Dashboard + +- `/creative/ui` expanded with Music tab +- Audio player widget (HTML5 `