audit: clean Docker architecture, consolidate test fixtures, add containerized test runner (#94)

This commit is contained in:
Alexander Whitestone
2026-02-28 16:11:58 -05:00
committed by GitHub
parent 1e19164379
commit d7d7a5a80a
24 changed files with 700 additions and 494 deletions

View File

@@ -1,49 +0,0 @@
# ── Ollama with Pre-loaded Models ──────────────────────────────────────────────
#
# This Dockerfile extends the official Ollama image with pre-loaded models
# for faster startup and better performance.
#
# Build: docker build -f Dockerfile.ollama -t timmy-ollama:latest .
# Run: docker run -p 11434:11434 -v ollama-data:/root/.ollama timmy-ollama:latest
FROM ollama/ollama:latest
# Set environment variables
ENV OLLAMA_HOST=0.0.0.0:11434
# Create a startup script that pulls models on first run
RUN mkdir -p /app
COPY <<EOF /app/init-models.sh
#!/bin/bash
set -e
echo "🚀 Ollama startup — checking for models..."
# Start Ollama in the background
ollama serve &
OLLAMA_PID=$!
# Wait for Ollama to be ready
echo "⏳ Waiting for Ollama to be ready..."
for i in {1..30}; do
if curl -s http://localhost:11434/api/tags > /dev/null 2>&1; then
echo "✓ Ollama is ready"
break
fi
sleep 1
done
# Pull the default model if not already present
echo "📥 Pulling llama3.2 model..."
ollama pull llama3.2 || true
echo "✓ Ollama initialization complete"
# Keep the process running
wait $OLLAMA_PID
EOF
RUN chmod +x /app/init-models.sh
# Use the init script as the entrypoint
ENTRYPOINT ["/app/init-models.sh"]

View File

@@ -1,7 +1,9 @@
.PHONY: install install-bigbrain dev nuke fresh test test-cov test-cov-html watch lint clean help \ .PHONY: install install-bigbrain dev nuke fresh test test-cov test-cov-html watch lint clean help \
up down logs \ up down logs \
docker-build docker-up docker-down docker-agent docker-logs docker-shell \ docker-build docker-up docker-down docker-agent docker-logs docker-shell \
cloud-deploy cloud-up cloud-down cloud-logs cloud-status cloud-update test-docker test-docker-cov test-docker-functional test-docker-build test-docker-down \
cloud-deploy cloud-up cloud-down cloud-logs cloud-status cloud-update \
logs-up logs-down logs-kibana
PYTEST := poetry run pytest PYTEST := poetry run pytest
UVICORN := poetry run uvicorn UVICORN := poetry run uvicorn
@@ -114,6 +116,38 @@ test-cov-html:
test-ollama: test-ollama:
FUNCTIONAL_DOCKER=1 $(PYTEST) tests/functional/test_ollama_chat.py -v --tb=long -x FUNCTIONAL_DOCKER=1 $(PYTEST) tests/functional/test_ollama_chat.py -v --tb=long -x
# ── Docker test containers ───────────────────────────────────────────────────
# Clean containers from cached images; source bind-mounted for fast iteration.
# Rebuild only needed when pyproject.toml / poetry.lock change.
# Build the test image (cached — fast unless deps change)
test-docker-build:
DOCKER_BUILDKIT=1 docker compose -f docker-compose.test.yml build
# Run all unit + integration tests in a clean container (default)
# Override: make test-docker ARGS="-k swarm -v"
test-docker: test-docker-build
docker compose -f docker-compose.test.yml run --rm test \
pytest tests/ -q --tb=short $(ARGS)
docker compose -f docker-compose.test.yml down -v
# Run tests with coverage inside a container
test-docker-cov: test-docker-build
docker compose -f docker-compose.test.yml run --rm test \
pytest tests/ --cov=src --cov-report=term-missing -q $(ARGS)
docker compose -f docker-compose.test.yml down -v
# Spin up the full stack (dashboard + optional Ollama) and run functional tests
test-docker-functional: test-docker-build
docker compose -f docker-compose.test.yml --profile functional up -d --wait
docker compose -f docker-compose.test.yml run --rm test \
pytest tests/functional/ -v --tb=short $(ARGS) || true
docker compose -f docker-compose.test.yml --profile functional down -v
# Tear down any leftover test containers and volumes
test-docker-down:
docker compose -f docker-compose.test.yml --profile functional --profile ollama --profile agents down -v
# ── Code quality ────────────────────────────────────────────────────────────── # ── Code quality ──────────────────────────────────────────────────────────────
lint: lint:
@@ -226,6 +260,22 @@ cloud-scale:
cloud-pull-model: cloud-pull-model:
docker exec timmy-ollama ollama pull $${MODEL:-llama3.2} docker exec timmy-ollama ollama pull $${MODEL:-llama3.2}
# ── ELK Logging ──────────────────────────────────────────────────────────────
# Overlay on top of the production stack for centralised log aggregation.
# Kibana UI: http://localhost:5601
logs-up:
docker compose -f docker-compose.prod.yml -f docker-compose.logging.yml up -d
logs-down:
docker compose -f docker-compose.prod.yml -f docker-compose.logging.yml down
logs-kibana:
@echo "Opening Kibana at http://localhost:5601 ..."
@command -v open >/dev/null 2>&1 && open http://localhost:5601 || \
command -v xdg-open >/dev/null 2>&1 && xdg-open http://localhost:5601 || \
echo " → Open http://localhost:5601 in your browser"
# ── Housekeeping ────────────────────────────────────────────────────────────── # ── Housekeeping ──────────────────────────────────────────────────────────────
clean: clean:
@@ -268,6 +318,15 @@ help:
@echo " make pre-commit-install install pre-commit hooks" @echo " make pre-commit-install install pre-commit hooks"
@echo " make clean remove build artefacts and caches" @echo " make clean remove build artefacts and caches"
@echo "" @echo ""
@echo " Docker Testing (Clean Containers)"
@echo " ─────────────────────────────────────────────────"
@echo " make test-docker run tests in clean container"
@echo " make test-docker ARGS=\"-k swarm\" filter tests in container"
@echo " make test-docker-cov tests + coverage in container"
@echo " make test-docker-functional full-stack functional tests"
@echo " make test-docker-build build test image (cached)"
@echo " make test-docker-down tear down test containers"
@echo ""
@echo " Docker (Advanced)" @echo " Docker (Advanced)"
@echo " ─────────────────────────────────────────────────" @echo " ─────────────────────────────────────────────────"
@echo " make docker-build build the timmy-time:latest image" @echo " make docker-build build the timmy-time:latest image"
@@ -289,3 +348,9 @@ help:
@echo " make cloud-scale N=4 scale agent workers" @echo " make cloud-scale N=4 scale agent workers"
@echo " make cloud-pull-model MODEL=llama3.2 pull LLM model" @echo " make cloud-pull-model MODEL=llama3.2 pull LLM model"
@echo "" @echo ""
@echo " ELK Log Aggregation"
@echo " ─────────────────────────────────────────────────"
@echo " make logs-up start prod + ELK stack"
@echo " make logs-down stop prod + ELK stack"
@echo " make logs-kibana open Kibana UI (http://localhost:5601)"
@echo ""

View File

@@ -0,0 +1,23 @@
# ── Elasticsearch — Single-node config for Timmy Time ───────────────────────
#
# Minimal config for a single-node deployment. For multi-node clusters
# see: https://www.elastic.co/guide/en/elasticsearch/reference/current/docker.html
cluster.name: timmy-logs
node.name: timmy-es-01
# Single-node discovery (no cluster formation overhead)
discovery.type: single-node
# Bind to all interfaces inside the container
network.host: 0.0.0.0
# Security: disable X-Pack security for internal-only deployments.
# Enable and configure TLS if exposing Elasticsearch externally.
xpack.security.enabled: false
# Memory: let the JVM use container-aware defaults
# (set ES_JAVA_OPTS in docker-compose for explicit heap)
# Index lifecycle — auto-delete old logs after 30 days
# Applied via ILM policy created by Logstash on first boot.

14
deploy/elk/kibana.yml Normal file
View File

@@ -0,0 +1,14 @@
# ── Kibana — Dashboard config for Timmy Time ────────────────────────────────
server.name: timmy-kibana
server.host: "0.0.0.0"
server.port: 5601
# Connect to Elasticsearch on the Docker network
elasticsearch.hosts: ["http://elasticsearch:9200"]
# Disable telemetry (sovereign deployment)
telemetry.enabled: false
# Default index pattern
# Kibana will auto-create this on first boot when logs arrive.

78
deploy/elk/logstash.conf Normal file
View File

@@ -0,0 +1,78 @@
# ── Logstash Pipeline — Timmy Time log aggregation ──────────────────────────
#
# Collects Docker container logs via the GELF input, parses them,
# and ships structured events to Elasticsearch.
#
# Flow: Docker (GELF driver) → Logstash :12201/udp → Elasticsearch
input {
# GELF (Graylog Extended Log Format) — Docker's native structured log driver.
# Each container sends logs here automatically via the logging driver config
# in docker-compose.logging.yml.
gelf {
port => 12201
type => "docker"
}
}
filter {
# ── Tag by container name ──────────────────────────────────────────────────
# Docker GELF driver populates these fields automatically:
# container_name, container_id, image_name, tag, command, created
# Strip leading "/" from container_name (Docker convention)
if [container_name] {
mutate {
gsub => ["container_name", "^/", ""]
}
}
# ── Parse JSON log lines (FastAPI/uvicorn emit JSON when configured) ──────
if [message] =~ /^\{/ {
json {
source => "message"
target => "log"
skip_on_invalid_json => true
}
}
# ── Extract log level ─────────────────────────────────────────────────────
# Try structured field first, fall back to regex on raw message
if [log][level] {
mutate { add_field => { "log_level" => "%{[log][level]}" } }
} else if [level] {
mutate { add_field => { "log_level" => "%{level}" } }
} else {
grok {
match => { "message" => "(?i)(?<log_level>DEBUG|INFO|WARNING|ERROR|CRITICAL)" }
tag_on_failure => []
}
}
# Normalise to uppercase
if [log_level] {
mutate { uppercase => ["log_level"] }
}
# ── Add service metadata ──────────────────────────────────────────────────
mutate {
add_field => { "environment" => "production" }
add_field => { "project" => "timmy-time" }
}
}
output {
elasticsearch {
hosts => ["http://elasticsearch:9200"]
index => "timmy-logs-%{+YYYY.MM.dd}"
# ILM policy: auto-rollover + delete after 30 days
ilm_enabled => true
ilm_rollover_alias => "timmy-logs"
ilm_pattern => "{now/d}-000001"
ilm_policy => "timmy-logs-policy"
}
# Also print to stdout for debugging (disable in production)
# stdout { codec => rubydebug }
}

View File

@@ -1,130 +0,0 @@
# ── Timmy Time — Enhanced Docker Compose with Ollama ──────────────────────────
#
# This enhanced version includes Ollama service for local LLM inference.
# Services:
# ollama Local LLM inference server (required for Timmy)
# dashboard FastAPI app + swarm coordinator
# timmy Timmy sovereign agent
# agent Swarm worker template (scale with --scale agent=N --profile agents)
#
# Usage:
# docker compose -f docker-compose.enhanced.yml up -d
# docker compose -f docker-compose.enhanced.yml logs -f dashboard
# docker compose -f docker-compose.enhanced.yml down
services:
# ── Ollama — Local LLM Inference Server ────────────────────────────────────
ollama:
image: ollama/ollama:latest
container_name: timmy-ollama
ports:
- "11434:11434"
volumes:
- ollama-data:/root/.ollama
environment:
OLLAMA_HOST: "0.0.0.0:11434"
networks:
- swarm-net
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:11434/api/tags"]
interval: 30s
timeout: 5s
retries: 3
start_period: 30s
# ── Dashboard (coordinator + FastAPI) ──────────────────────────────────────
dashboard:
build: .
image: timmy-time:latest
container_name: timmy-dashboard
user: "0:0"
ports:
- "8000:8000"
volumes:
- timmy-data:/app/data
- ./src:/app/src
- ./static:/app/static
environment:
DEBUG: "true"
# Point to Ollama container
OLLAMA_URL: "http://ollama:11434"
GROK_ENABLED: "${GROK_ENABLED:-false}"
XAI_API_KEY: "${XAI_API_KEY:-}"
GROK_DEFAULT_MODEL: "${GROK_DEFAULT_MODEL:-grok-3-fast}"
networks:
- swarm-net
depends_on:
ollama:
condition: service_healthy
restart: unless-stopped
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
interval: 30s
timeout: 5s
retries: 3
start_period: 15s
# ── Timmy — Sovereign AI Agent ─────────────────────────────────────────────
timmy:
build: .
image: timmy-time:latest
container_name: timmy-agent
volumes:
- timmy-data:/app/data
- ./src:/app/src
environment:
COORDINATOR_URL: "http://dashboard:8000"
OLLAMA_URL: "http://ollama:11434"
TIMMY_AGENT_ID: "timmy"
command: ["python", "-m", "timmy.docker_agent"]
networks:
- swarm-net
depends_on:
dashboard:
condition: service_healthy
ollama:
condition: service_healthy
restart: unless-stopped
# ── Agent Worker Template ──────────────────────────────────────────────────
# Scale: docker compose -f docker-compose.enhanced.yml up --scale agent=4 --profile agents
agent:
build: .
image: timmy-time:latest
profiles:
- agents
volumes:
- timmy-data:/app/data
- ./src:/app/src
environment:
COORDINATOR_URL: "http://dashboard:8000"
OLLAMA_URL: "http://ollama:11434"
AGENT_NAME: "${AGENT_NAME:-Worker}"
AGENT_CAPABILITIES: "${AGENT_CAPABILITIES:-general}"
command: ["sh", "-c", "python -m swarm.agent_runner --agent-id agent-$(hostname) --name $${AGENT_NAME:-Worker}"]
networks:
- swarm-net
depends_on:
dashboard:
condition: service_healthy
ollama:
condition: service_healthy
restart: unless-stopped
# ── Volumes ────────────────────────────────────────────────────────────────────
volumes:
timmy-data:
driver: local
driver_opts:
type: none
o: bind
device: "${PWD}/data"
ollama-data:
driver: local
# ── Internal Network ───────────────────────────────────────────────────────────
networks:
swarm-net:
driver: bridge

127
docker-compose.logging.yml Normal file
View File

@@ -0,0 +1,127 @@
# ── Timmy Time — ELK Log Aggregation Overlay ────────────────────────────────
#
# Adds Elasticsearch + Logstash + Kibana alongside the production stack.
# Use as an overlay on top of the prod compose:
#
# docker compose \
# -f docker-compose.prod.yml \
# -f docker-compose.logging.yml \
# up -d
#
# ── How it works ────────────────────────────────────────────────────────────
#
# 1. Every container's Docker logging driver is set to GELF, which sends
# structured log events (JSON with container metadata) over UDP.
#
# 2. Logstash listens on :12201/udp, parses the GELF messages, extracts
# log levels, parses JSON payloads from FastAPI/uvicorn, and adds
# project metadata.
#
# 3. Logstash ships the enriched events to Elasticsearch, indexed by day
# (timmy-logs-YYYY.MM.dd) with a 30-day ILM retention policy.
#
# 4. Kibana provides the web UI on :5601 for searching, filtering,
# and building dashboards over the indexed logs.
#
# ── Access ──────────────────────────────────────────────────────────────────
# Kibana: http://localhost:5601
# Elasticsearch: http://localhost:9200 (API only, not exposed by default)
#
# ── Resource notes ──────────────────────────────────────────────────────────
# Elasticsearch: ~512 MB heap (ES_JAVA_OPTS below). Increase for
# high-throughput deployments.
# Logstash: ~256 MB heap. Lightweight for GELF → ES pipeline.
# Kibana: ~300 MB RAM. Stateless — safe to restart anytime.
#
# Total overhead: ~1.1 GB RAM on top of the base production stack.
services:
# ── Elasticsearch — log storage and search engine ─────────────────────────
elasticsearch:
image: docker.elastic.co/elasticsearch/elasticsearch:8.17.0
container_name: timmy-elasticsearch
volumes:
- es-data:/usr/share/elasticsearch/data
- ./deploy/elk/elasticsearch.yml:/usr/share/elasticsearch/config/elasticsearch.yml:ro
environment:
ES_JAVA_OPTS: "-Xms512m -Xmx512m"
networks:
- swarm-net
restart: unless-stopped
healthcheck:
test: ["CMD-SHELL", "curl -sf http://localhost:9200/_cluster/health | grep -q '\"status\":\"green\\|yellow\"'"]
interval: 30s
timeout: 10s
retries: 5
start_period: 60s
# ── Logstash — log pipeline (GELF in → Elasticsearch out) ────────────────
logstash:
image: docker.elastic.co/logstash/logstash:8.17.0
container_name: timmy-logstash
volumes:
- ./deploy/elk/logstash.conf:/usr/share/logstash/pipeline/logstash.conf:ro
environment:
LS_JAVA_OPTS: "-Xms256m -Xmx256m"
ports:
- "12201:12201/udp" # GELF input from Docker logging driver
networks:
- swarm-net
depends_on:
elasticsearch:
condition: service_healthy
restart: unless-stopped
# ── Kibana — log visualisation UI ─────────────────────────────────────────
kibana:
image: docker.elastic.co/kibana/kibana:8.17.0
container_name: timmy-kibana
volumes:
- ./deploy/elk/kibana.yml:/usr/share/kibana/config/kibana.yml:ro
ports:
- "5601:5601"
networks:
- swarm-net
depends_on:
elasticsearch:
condition: service_healthy
restart: unless-stopped
# ── Override existing services to use GELF logging driver ─────────────────
# These extend the services defined in docker-compose.prod.yml.
# Docker merges the logging config into the existing service definition.
dashboard:
logging:
driver: gelf
options:
gelf-address: "udp://localhost:12201"
tag: "dashboard"
depends_on:
logstash:
condition: service_started
timmy:
logging:
driver: gelf
options:
gelf-address: "udp://localhost:12201"
tag: "timmy-agent"
depends_on:
logstash:
condition: service_started
ollama:
logging:
driver: gelf
options:
gelf-address: "udp://localhost:12201"
tag: "ollama"
depends_on:
logstash:
condition: service_started
# ── Persistent volume for Elasticsearch indices ────────────────────────────
volumes:
es-data:

View File

@@ -11,8 +11,6 @@
# docker compose -f docker-compose.microservices.yml logs -f dashboard # docker compose -f docker-compose.microservices.yml logs -f dashboard
# docker compose -f docker-compose.microservices.yml up --scale worker=4 # docker compose -f docker-compose.microservices.yml up --scale worker=4
version: "3.9"
services: services:
# ── Ollama LLM Service ──────────────────────────────────────────────────── # ── Ollama LLM Service ────────────────────────────────────────────────────

View File

@@ -1,54 +1,73 @@
# ── Timmy Time — test stack ────────────────────────────────────────────────── # ── Timmy Time — Test Stack ──────────────────────────────────────────────────
# #
# Lightweight compose for functional tests. Runs the dashboard on port 18000 # Clean containers for test runs. Designed for fast iteration:
# and optional agent workers on the swarm-test-net network. # • Cached builder layers — only rebuilds when pyproject.toml changes
# • Bind-mounted source — code changes are instant, no rebuild needed
# • Ephemeral test-data — every run starts with clean state
# #
# Profiles: # ── Profiles ────────────────────────────────────────────────────────────────
# (default) dashboard only (Ollama on host via host.docker.internal) # (default) test runner only (unit + integration tests)
# ollama adds a containerised Ollama instance + auto model pull # functional adds a live dashboard on port 18000 for HTTP-level tests
# agents adds scalable agent workers # ollama adds containerised Ollama (CPU, qwen2.5:0.5b) for LLM tests
# agents adds swarm agent workers for multi-agent tests
# #
# Usage: # ── Quick-start ─────────────────────────────────────────────────────────────
# # Swarm tests (no LLM needed): # make test-docker # unit + integration in container
# FUNCTIONAL_DOCKER=1 pytest tests/functional/test_docker_swarm.py -v # make test-docker ARGS="-k swarm" # filter tests
# make test-docker-functional # full-stack functional tests
# make test-docker-cov # with coverage report
# #
# # Full-stack with Ollama (pulls qwen2.5:0.5b automatically): # ── Manual usage ────────────────────────────────────────────────────────────
# FUNCTIONAL_DOCKER=1 pytest tests/functional/test_ollama_chat.py -v # docker compose -f docker-compose.test.yml run --rm test
# # docker compose -f docker-compose.test.yml run --rm test pytest tests/swarm -v
# Or manually: # docker compose -f docker-compose.test.yml --profile functional up -d --wait
# docker compose -f docker-compose.test.yml -p timmy-test up -d --build --wait # docker compose -f docker-compose.test.yml down -v
# curl http://localhost:18000/health
# docker compose -f docker-compose.test.yml -p timmy-test down -v
services: services:
# ── Ollama — local LLM for functional tests ─────────────────────────────── # ── Test Runner ───────────────────────────────────────────────────────────
# Activated with: --profile ollama # Runs pytest in a clean container. Exits when tests complete.
# Uses a tiny model (qwen2.5:0.5b, ~400 MB) so it runs on CPU-only CI. # Source and tests are bind-mounted so code changes don't require a rebuild.
ollama: test:
image: ollama/ollama:latest build:
container_name: timmy-test-ollama context: .
profiles: dockerfile: docker/Dockerfile.test
- ollama cache_from:
- timmy-test:latest
image: timmy-test:latest
volumes:
- ./src:/app/src:ro
- ./tests:/app/tests:ro
- ./static:/app/static:ro
- ./pyproject.toml:/app/pyproject.toml:ro
- test-data:/app/data
environment:
TIMMY_TEST_MODE: "1"
LIGHTNING_BACKEND: "mock"
PYTHONDONTWRITEBYTECODE: "1"
networks: networks:
- swarm-test-net - test-net
healthcheck: # Default command — override with: docker compose run --rm test pytest <args>
test: ["CMD", "curl", "-f", "http://localhost:11434/api/tags"] command: ["pytest", "tests/", "-q", "--tb=short"]
interval: 5s
timeout: 5s
retries: 20
start_period: 10s
# ── Dashboard — live server for functional tests ──────────────────────────
# Activated with: --profile functional
dashboard: dashboard:
build: . build:
image: timmy-time:test context: .
dockerfile: docker/Dockerfile.test
cache_from:
- timmy-test:latest
image: timmy-test:latest
profiles:
- functional
container_name: timmy-test-dashboard container_name: timmy-test-dashboard
ports: ports:
- "18000:8000" - "18000:8000"
volumes: volumes:
- ./src:/app/src:ro
- ./static:/app/static:ro
- test-data:/app/data - test-data:/app/data
- ./src:/app/src
- ./static:/app/static
environment: environment:
DEBUG: "true" DEBUG: "true"
TIMMY_TEST_MODE: "1" TIMMY_TEST_MODE: "1"
@@ -58,7 +77,8 @@ services:
extra_hosts: extra_hosts:
- "host.docker.internal:host-gateway" - "host.docker.internal:host-gateway"
networks: networks:
- swarm-test-net - test-net
command: ["uvicorn", "dashboard.app:app", "--host", "0.0.0.0", "--port", "8000"]
healthcheck: healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:8000/health"] test: ["CMD", "curl", "-f", "http://localhost:8000/health"]
interval: 5s interval: 5s
@@ -66,14 +86,38 @@ services:
retries: 10 retries: 10
start_period: 10s start_period: 10s
# ── Ollama — local LLM for functional tests ──────────────────────────────
# Activated with: --profile ollama
# Uses a tiny model (qwen2.5:0.5b, ~400 MB) so it runs on CPU-only CI.
ollama:
image: ollama/ollama:latest
container_name: timmy-test-ollama
profiles:
- ollama
networks:
- test-net
healthcheck:
test: ["CMD", "curl", "-f", "http://localhost:11434/api/tags"]
interval: 5s
timeout: 5s
retries: 20
start_period: 10s
# ── Agent — swarm worker for multi-agent tests ───────────────────────────
# Activated with: --profile agents
# Scale: docker compose -f docker-compose.test.yml --profile agents up --scale agent=4
agent: agent:
build: . build:
image: timmy-time:test context: .
dockerfile: docker/Dockerfile.test
cache_from:
- timmy-test:latest
image: timmy-test:latest
profiles: profiles:
- agents - agents
volumes: volumes:
- ./src:/app/src:ro
- test-data:/app/data - test-data:/app/data
- ./src:/app/src
environment: environment:
COORDINATOR_URL: "http://dashboard:8000" COORDINATOR_URL: "http://dashboard:8000"
OLLAMA_URL: "${OLLAMA_URL:-http://host.docker.internal:11434}" OLLAMA_URL: "${OLLAMA_URL:-http://host.docker.internal:11434}"
@@ -83,16 +127,21 @@ services:
TIMMY_TEST_MODE: "1" TIMMY_TEST_MODE: "1"
extra_hosts: extra_hosts:
- "host.docker.internal:host-gateway" - "host.docker.internal:host-gateway"
command: ["sh", "-c", "python -m swarm.agent_runner --agent-id agent-$(hostname) --name $${AGENT_NAME:-TestWorker}"] command: >-
sh -c "python -m swarm.agent_runner
--agent-id agent-$$(hostname)
--name $${AGENT_NAME:-TestWorker}"
networks: networks:
- swarm-test-net - test-net
depends_on: depends_on:
dashboard: dashboard:
condition: service_healthy condition: service_healthy
# ── Ephemeral volume — destroyed with `docker compose down -v` ─────────────
volumes: volumes:
test-data: test-data:
# ── Isolated test network ─────────────────────────────────────────────────
networks: networks:
swarm-test-net: test-net:
driver: bridge driver: bridge

View File

@@ -1,7 +1,8 @@
# ── Timmy Time — docker-compose ───────────────────────────────────────────── # ── Timmy Time — Development Compose ────────────────────────────────────────
# #
# Services # Services
# dashboard FastAPI app + swarm coordinator (always on) # dashboard FastAPI app + swarm coordinator (always on)
# timmy Sovereign AI agent (separate container)
# agent Swarm worker template — scale with: # agent Swarm worker template — scale with:
# docker compose up --scale agent=N --profile agents # docker compose up --scale agent=N --profile agents
# #
@@ -14,6 +15,27 @@
# make docker-agent add one agent worker # make docker-agent add one agent worker
# make docker-down stop everything # make docker-down stop everything
# make docker-logs tail logs # make docker-logs tail logs
#
# ── Security note: root user in dev ─────────────────────────────────────────
# This dev compose runs containers as root (user: "0:0") so that
# bind-mounted host files (./src, ./static) are readable regardless of
# host UID/GID — the #1 cause of 403 errors on macOS.
#
# Production (docker-compose.prod.yml) uses NO bind mounts and runs as
# the Dockerfile's non-root "timmy" user. Never expose this dev compose
# to untrusted networks.
#
# ── Ollama host access ──────────────────────────────────────────────────────
# By default OLLAMA_URL points to http://host.docker.internal:11434 which
# reaches Ollama running on the Docker host (macOS/Windows native).
#
# Linux: The extra_hosts entry maps host.docker.internal → host-gateway,
# which resolves to the host IP on Docker 20.10+. If you run an
# older Docker version, set OLLAMA_URL=http://172.17.0.1:11434
# in your .env file instead.
#
# Containerised Ollama: Use docker-compose.microservices.yml which runs
# Ollama as a sibling container on the same network.
services: services:
@@ -22,12 +44,7 @@ services:
build: . build: .
image: timmy-time:latest image: timmy-time:latest
container_name: timmy-dashboard container_name: timmy-dashboard
# Run as root in the dev compose because bind-mounted host files user: "0:0" # dev only — see security note above
# (./src, ./static) may not be readable by the image's non-root
# "timmy" user — this is the #1 cause of 403 errors on macOS.
# Production (docker-compose.prod.yml) uses no bind mounts and
# correctly runs as the Dockerfile's non-root USER.
user: "0:0"
ports: ports:
- "8000:8000" - "8000:8000"
volumes: volumes:
@@ -36,14 +53,13 @@ services:
- ./static:/app/static # live-reload: CSS/asset changes reflect immediately - ./static:/app/static # live-reload: CSS/asset changes reflect immediately
environment: environment:
DEBUG: "true" DEBUG: "true"
# Point to host Ollama (Mac default). Override in .env if different.
OLLAMA_URL: "${OLLAMA_URL:-http://host.docker.internal:11434}" OLLAMA_URL: "${OLLAMA_URL:-http://host.docker.internal:11434}"
# Grok (xAI) — opt-in premium cloud backend # Grok (xAI) — opt-in premium cloud backend
GROK_ENABLED: "${GROK_ENABLED:-false}" GROK_ENABLED: "${GROK_ENABLED:-false}"
XAI_API_KEY: "${XAI_API_KEY:-}" XAI_API_KEY: "${XAI_API_KEY:-}"
GROK_DEFAULT_MODEL: "${GROK_DEFAULT_MODEL:-grok-3-fast}" GROK_DEFAULT_MODEL: "${GROK_DEFAULT_MODEL:-grok-3-fast}"
extra_hosts: extra_hosts:
- "host.docker.internal:host-gateway" # Linux compatibility - "host.docker.internal:host-gateway" # Linux: maps to host IP
networks: networks:
- swarm-net - swarm-net
restart: unless-stopped restart: unless-stopped

59
docker/Dockerfile.test Normal file
View File

@@ -0,0 +1,59 @@
# ── Timmy Time — Test Runner Image ───────────────────────────────────────────
#
# Lean image with test dependencies baked in. Designed to be used with
# docker-compose.test.yml which bind-mounts src/, tests/, and static/
# so you never rebuild for code changes — only when deps change.
#
# Build: docker compose -f docker-compose.test.yml build
# Run: docker compose -f docker-compose.test.yml run --rm test
#
# The builder stage is shared with the production Dockerfile so
# dependency layers stay cached across dev ↔ test ↔ prod builds.
# ── Stage 1: Builder — export deps via Poetry, install via pip ──────────────
FROM python:3.12-slim AS builder
WORKDIR /build
RUN apt-get update && apt-get install -y --no-install-recommends \
gcc curl \
&& rm -rf /var/lib/apt/lists/*
RUN pip install --no-cache-dir poetry poetry-plugin-export
# Copy only dependency files (layer caching — rebuilds only when deps change)
COPY pyproject.toml poetry.lock ./
# Export ALL deps including dev/test extras
RUN poetry export --extras swarm --extras telegram --extras dev \
--with dev --without-hashes \
-f requirements.txt -o requirements.txt
RUN --mount=type=cache,target=/root/.cache/pip \
pip install --no-cache-dir -r requirements.txt
# ── Stage 2: Test runtime ───────────────────────────────────────────────────
FROM python:3.12-slim
WORKDIR /app
RUN apt-get update && apt-get install -y --no-install-recommends \
curl git \
&& rm -rf /var/lib/apt/lists/*
# Copy installed packages from builder
COPY --from=builder /usr/local/lib/python3.12/site-packages \
/usr/local/lib/python3.12/site-packages
COPY --from=builder /usr/local/bin /usr/local/bin
# Create directories for bind mounts
RUN mkdir -p /app/src /app/tests /app/static /app/data
ENV PYTHONPATH=/app/src:/app/tests
ENV PYTHONUNBUFFERED=1
ENV PYTHONDONTWRITEBYTECODE=1
ENV TIMMY_TEST_MODE=1
# Default: run pytest (overridable via docker-compose command)
CMD ["pytest", "tests/", "-q", "--tb=short"]

View File

@@ -2,11 +2,20 @@
# ── Ollama Initialization Script ────────────────────────────────────────────── # ── Ollama Initialization Script ──────────────────────────────────────────────
# #
# Starts Ollama and pulls models on first run. # Starts Ollama and pulls models on first run.
# Requires: curl (ships with the ollama image).
# jq is installed at runtime if missing so we can parse /api/tags reliably
# instead of fragile grep-based JSON extraction.
set -e set -e
echo "🚀 Ollama startup — checking for models..." echo "🚀 Ollama startup — checking for models..."
# ── Ensure jq is available (ollama image is Debian-based) ────────────────────
if ! command -v jq &>/dev/null; then
echo "📦 Installing jq for reliable JSON parsing..."
apt-get update -qq && apt-get install -y -qq jq >/dev/null 2>&1 || true
fi
# Start Ollama in background # Start Ollama in background
ollama serve & ollama serve &
OLLAMA_PID=$! OLLAMA_PID=$!
@@ -18,15 +27,26 @@ for i in {1..60}; do
echo "✓ Ollama is ready" echo "✓ Ollama is ready"
break break
fi fi
if [ "$i" -eq 60 ]; then
echo "❌ Ollama failed to start after 60 s"
exit 1
fi
echo " Attempt $i/60..." echo " Attempt $i/60..."
sleep 1 sleep 1
done done
# Check if models are already present # Check if models are already present (jq with grep fallback)
echo "📋 Checking available models..." echo "📋 Checking available models..."
MODELS=$(curl -s http://localhost:11434/api/tags | grep -o '"name":"[^"]*"' | wc -l) TAGS_JSON=$(curl -s http://localhost:11434/api/tags)
if [ "$MODELS" -eq 0 ]; then if command -v jq &>/dev/null; then
MODELS=$(echo "$TAGS_JSON" | jq '.models | length')
else
# Fallback: count "name" keys (less reliable but functional)
MODELS=$(echo "$TAGS_JSON" | grep -o '"name"' | wc -l)
fi
if [ "${MODELS:-0}" -eq 0 ]; then
echo "📥 No models found. Pulling llama3.2..." echo "📥 No models found. Pulling llama3.2..."
ollama pull llama3.2 || echo "⚠️ Failed to pull llama3.2 (may already be pulling)" ollama pull llama3.2 || echo "⚠️ Failed to pull llama3.2 (may already be pulling)"
else else

74
poetry.lock generated
View File

@@ -458,7 +458,7 @@ files = [
{file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"}, {file = "attrs-25.4.0-py3-none-any.whl", hash = "sha256:adcf7e2a1fb3b36ac48d97835bb6d8ade15b8dcce26aba8bf1d14847b57a3373"},
{file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"}, {file = "attrs-25.4.0.tar.gz", hash = "sha256:16d5969b87f0859ef33a48b35d55ac1be6e42ae49d5e853b597db70c35c57e11"},
] ]
markers = {main = "extra == \"discord\""} markers = {main = "extra == \"discord\" or extra == \"dev\""}
[[package]] [[package]]
name = "audioop-lts" name = "audioop-lts"
@@ -538,8 +538,7 @@ version = "2.0.0"
description = "Foreign Function Interface for Python calling C code." description = "Foreign Function Interface for Python calling C code."
optional = false optional = false
python-versions = ">=3.9" python-versions = ">=3.9"
groups = ["dev"] groups = ["main", "dev"]
markers = "os_name == \"nt\" and implementation_name != \"pypy\""
files = [ files = [
{file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"}, {file = "cffi-2.0.0-cp310-cp310-macosx_10_13_x86_64.whl", hash = "sha256:0cf2d91ecc3fcc0625c2c530fe004f82c110405f101548512cce44322fa8ac44"},
{file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"}, {file = "cffi-2.0.0-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:f73b96c41e3b2adedc34a7356e64c8eb96e03a3782b535e043a986276ce12a49"},
@@ -626,6 +625,7 @@ files = [
{file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"}, {file = "cffi-2.0.0-cp39-cp39-win_amd64.whl", hash = "sha256:b882b3df248017dba09d6b16defe9b5c407fe32fc7c65a9c69798e6175601be9"},
{file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"}, {file = "cffi-2.0.0.tar.gz", hash = "sha256:44d1b5909021139fe36001ae048dbdde8214afa20200eda0f64c068cac5d5529"},
] ]
markers = {main = "os_name == \"nt\" and implementation_name != \"pypy\" and extra == \"dev\"", dev = "os_name == \"nt\" and implementation_name != \"pypy\""}
[package.dependencies] [package.dependencies]
pycparser = {version = "*", markers = "implementation_name != \"PyPy\""} pycparser = {version = "*", markers = "implementation_name != \"PyPy\""}
@@ -800,7 +800,7 @@ version = "7.13.4"
description = "Code coverage measurement for Python" description = "Code coverage measurement for Python"
optional = false optional = false
python-versions = ">=3.10" python-versions = ">=3.10"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "coverage-7.13.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fc31c787a84f8cd6027eba44010517020e0d18487064cd3d8968941856d1415"}, {file = "coverage-7.13.4-cp310-cp310-macosx_10_9_x86_64.whl", hash = "sha256:0fc31c787a84f8cd6027eba44010517020e0d18487064cd3d8968941856d1415"},
{file = "coverage-7.13.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a32ebc02a1805adf637fc8dec324b5cdacd2e493515424f70ee33799573d661b"}, {file = "coverage-7.13.4-cp310-cp310-macosx_11_0_arm64.whl", hash = "sha256:a32ebc02a1805adf637fc8dec324b5cdacd2e493515424f70ee33799573d661b"},
@@ -909,6 +909,7 @@ files = [
{file = "coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0"}, {file = "coverage-7.13.4-py3-none-any.whl", hash = "sha256:1af1641e57cf7ba1bd67d677c9abdbcd6cc2ab7da3bca7fa1e2b7e50e65f2ad0"},
{file = "coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91"}, {file = "coverage-7.13.4.tar.gz", hash = "sha256:e5c8f6ed1e61a8b2dcdf31eb0b9bbf0130750ca79c1c49eb898e2ad86f5ccc91"},
] ]
markers = {main = "extra == \"dev\""}
[package.extras] [package.extras]
toml = ["tomli ; python_full_version <= \"3.11.0a6\""] toml = ["tomli ; python_full_version <= \"3.11.0a6\""]
@@ -1599,11 +1600,12 @@ version = "2.3.0"
description = "brain-dead simple config-ini parsing" description = "brain-dead simple config-ini parsing"
optional = false optional = false
python-versions = ">=3.10" python-versions = ">=3.10"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"}, {file = "iniconfig-2.3.0-py3-none-any.whl", hash = "sha256:f631c04d2c48c52b84d0d0549c99ff3859c98df65b3101406327ecc7d53fbf12"},
{file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"}, {file = "iniconfig-2.3.0.tar.gz", hash = "sha256:c76315c77db068650d49c5b56314774a7804df16fee4402c1f19d6d15d8c4730"},
] ]
markers = {main = "extra == \"dev\""}
[[package]] [[package]]
name = "jinja2" name = "jinja2"
@@ -2490,11 +2492,12 @@ version = "1.3.0.post0"
description = "Capture the outcome of Python function calls." description = "Capture the outcome of Python function calls."
optional = false optional = false
python-versions = ">=3.7" python-versions = ">=3.7"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b"}, {file = "outcome-1.3.0.post0-py2.py3-none-any.whl", hash = "sha256:e771c5ce06d1415e356078d3bdd68523f284b4ce5419828922b6871e65eda82b"},
{file = "outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8"}, {file = "outcome-1.3.0.post0.tar.gz", hash = "sha256:9dcf02e65f2971b80047b377468e72a268e15c0af3cf1238e6ff14f7f91143b8"},
] ]
markers = {main = "extra == \"dev\""}
[package.dependencies] [package.dependencies]
attrs = ">=19.2.0" attrs = ">=19.2.0"
@@ -2642,11 +2645,12 @@ version = "1.6.0"
description = "plugin and hook calling mechanisms for python" description = "plugin and hook calling mechanisms for python"
optional = false optional = false
python-versions = ">=3.9" python-versions = ">=3.9"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"}, {file = "pluggy-1.6.0-py3-none-any.whl", hash = "sha256:e920276dd6813095e9377c0bc5566d94c932c33b27a3e3945d8389c374dd4746"},
{file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"}, {file = "pluggy-1.6.0.tar.gz", hash = "sha256:7dcc130b76258d33b90f61b658791dede3486c3e6bfb003ee5c9bfb396dd22f3"},
] ]
markers = {main = "extra == \"dev\""}
[package.extras] [package.extras]
dev = ["pre-commit", "tox"] dev = ["pre-commit", "tox"]
@@ -2842,12 +2846,12 @@ version = "3.0"
description = "C parser in Python" description = "C parser in Python"
optional = false optional = false
python-versions = ">=3.10" python-versions = ">=3.10"
groups = ["dev"] groups = ["main", "dev"]
markers = "os_name == \"nt\" and implementation_name != \"pypy\" and implementation_name != \"PyPy\""
files = [ files = [
{file = "pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992"}, {file = "pycparser-3.0-py3-none-any.whl", hash = "sha256:b727414169a36b7d524c1c3e31839a521725078d7b2ff038656844266160a992"},
{file = "pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29"}, {file = "pycparser-3.0.tar.gz", hash = "sha256:600f49d217304a5902ac3c37e1281c9fe94e4d0489de643a9504c5cdfdfc6b29"},
] ]
markers = {main = "os_name == \"nt\" and implementation_name != \"pypy\" and implementation_name != \"PyPy\" and extra == \"dev\"", dev = "os_name == \"nt\" and implementation_name != \"pypy\" and implementation_name != \"PyPy\""}
[[package]] [[package]]
name = "pydantic" name = "pydantic"
@@ -6617,12 +6621,13 @@ version = "1.7.1"
description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information." description = "A Python SOCKS client module. See https://github.com/Anorov/PySocks for more information."
optional = false optional = false
python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*" python-versions = ">=2.7, !=3.0.*, !=3.1.*, !=3.2.*, !=3.3.*"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"}, {file = "PySocks-1.7.1-py27-none-any.whl", hash = "sha256:08e69f092cc6dbe92a0fdd16eeb9b9ffbc13cadfe5ca4c7bd92ffb078b293299"},
{file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"}, {file = "PySocks-1.7.1-py3-none-any.whl", hash = "sha256:2725bd0a9925919b9b51739eea5f9e2bae91e83288108a9ad338b2e3a4435ee5"},
{file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"}, {file = "PySocks-1.7.1.tar.gz", hash = "sha256:3f8804571ebe159c380ac6de37643bb4685970655d3bba243530d6558b799aa0"},
] ]
markers = {main = "extra == \"dev\""}
[[package]] [[package]]
name = "pytest" name = "pytest"
@@ -6630,11 +6635,12 @@ version = "9.0.2"
description = "pytest: simple powerful testing with Python" description = "pytest: simple powerful testing with Python"
optional = false optional = false
python-versions = ">=3.10" python-versions = ">=3.10"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b"}, {file = "pytest-9.0.2-py3-none-any.whl", hash = "sha256:711ffd45bf766d5264d487b917733b453d917afd2b0ad65223959f59089f875b"},
{file = "pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11"}, {file = "pytest-9.0.2.tar.gz", hash = "sha256:75186651a92bd89611d1d9fc20f0b4345fd827c41ccd5c299a868a05d70edf11"},
] ]
markers = {main = "extra == \"dev\""}
[package.dependencies] [package.dependencies]
colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""} colorama = {version = ">=0.4", markers = "sys_platform == \"win32\""}
@@ -6652,11 +6658,12 @@ version = "1.3.0"
description = "Pytest support for asyncio" description = "Pytest support for asyncio"
optional = false optional = false
python-versions = ">=3.10" python-versions = ">=3.10"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5"}, {file = "pytest_asyncio-1.3.0-py3-none-any.whl", hash = "sha256:611e26147c7f77640e6d0a92a38ed17c3e9848063698d5c93d5aa7aa11cebff5"},
{file = "pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5"}, {file = "pytest_asyncio-1.3.0.tar.gz", hash = "sha256:d7f52f36d231b80ee124cd216ffb19369aa168fc10095013c6b014a34d3ee9e5"},
] ]
markers = {main = "extra == \"dev\""}
[package.dependencies] [package.dependencies]
pytest = ">=8.2,<10" pytest = ">=8.2,<10"
@@ -6672,11 +6679,12 @@ version = "7.0.0"
description = "Pytest plugin for measuring coverage." description = "Pytest plugin for measuring coverage."
optional = false optional = false
python-versions = ">=3.9" python-versions = ">=3.9"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861"}, {file = "pytest_cov-7.0.0-py3-none-any.whl", hash = "sha256:3b8e9558b16cc1479da72058bdecf8073661c7f57f7d3c5f22a1c23507f2d861"},
{file = "pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1"}, {file = "pytest_cov-7.0.0.tar.gz", hash = "sha256:33c97eda2e049a0c5298e91f519302a1334c26ac65c1a483d6206fd458361af1"},
] ]
markers = {main = "extra == \"dev\""}
[package.dependencies] [package.dependencies]
coverage = {version = ">=7.10.6", extras = ["toml"]} coverage = {version = ">=7.10.6", extras = ["toml"]}
@@ -6686,17 +6694,34 @@ pytest = ">=7"
[package.extras] [package.extras]
testing = ["process-tests", "pytest-xdist", "virtualenv"] testing = ["process-tests", "pytest-xdist", "virtualenv"]
[[package]]
name = "pytest-randomly"
version = "4.0.1"
description = "Pytest plugin to randomly order tests and control random.seed."
optional = false
python-versions = ">=3.9"
groups = ["main", "dev"]
files = [
{file = "pytest_randomly-4.0.1-py3-none-any.whl", hash = "sha256:e0dfad2fd4f35e07beff1e47c17fbafcf98f9bf4531fd369d9260e2f858bfcb7"},
{file = "pytest_randomly-4.0.1.tar.gz", hash = "sha256:174e57bb12ac2c26f3578188490bd333f0e80620c3f47340158a86eca0593cd8"},
]
markers = {main = "extra == \"dev\""}
[package.dependencies]
pytest = "*"
[[package]] [[package]]
name = "pytest-timeout" name = "pytest-timeout"
version = "2.4.0" version = "2.4.0"
description = "pytest plugin to abort hanging tests" description = "pytest plugin to abort hanging tests"
optional = false optional = false
python-versions = ">=3.7" python-versions = ">=3.7"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2"}, {file = "pytest_timeout-2.4.0-py3-none-any.whl", hash = "sha256:c42667e5cdadb151aeb5b26d114aff6bdf5a907f176a007a30b940d3d865b5c2"},
{file = "pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a"}, {file = "pytest_timeout-2.4.0.tar.gz", hash = "sha256:7e68e90b01f9eff71332b25001f85c75495fc4e3a836701876183c4bcfd0540a"},
] ]
markers = {main = "extra == \"dev\""}
[package.dependencies] [package.dependencies]
pytest = ">=7.0.0" pytest = ">=7.0.0"
@@ -7213,11 +7238,12 @@ version = "4.41.0"
description = "Official Python bindings for Selenium WebDriver" description = "Official Python bindings for Selenium WebDriver"
optional = false optional = false
python-versions = ">=3.10" python-versions = ">=3.10"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "selenium-4.41.0-py3-none-any.whl", hash = "sha256:b8ccde8d2e7642221ca64af184a92c19eee6accf2e27f20f30472f5efae18eb1"}, {file = "selenium-4.41.0-py3-none-any.whl", hash = "sha256:b8ccde8d2e7642221ca64af184a92c19eee6accf2e27f20f30472f5efae18eb1"},
{file = "selenium-4.41.0.tar.gz", hash = "sha256:003e971f805231ad63e671783a2b91a299355d10cefb9de964c36ff3819115aa"}, {file = "selenium-4.41.0.tar.gz", hash = "sha256:003e971f805231ad63e671783a2b91a299355d10cefb9de964c36ff3819115aa"},
] ]
markers = {main = "extra == \"dev\""}
[package.dependencies] [package.dependencies]
certifi = ">=2026.1.4" certifi = ">=2026.1.4"
@@ -7291,11 +7317,12 @@ version = "2.4.0"
description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set" description = "Sorted Containers -- Sorted List, Sorted Dict, Sorted Set"
optional = false optional = false
python-versions = "*" python-versions = "*"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"}, {file = "sortedcontainers-2.4.0-py2.py3-none-any.whl", hash = "sha256:a163dcaede0f1c021485e957a39245190e74249897e2ae4b2aa38595db237ee0"},
{file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"}, {file = "sortedcontainers-2.4.0.tar.gz", hash = "sha256:25caa5a06cc30b6b83d11423433f65d1f9d76c4c6a0c90e3379eaa43b9bfdb88"},
] ]
markers = {main = "extra == \"dev\""}
[[package]] [[package]]
name = "sqlalchemy" name = "sqlalchemy"
@@ -7626,11 +7653,12 @@ version = "0.33.0"
description = "A friendly Python library for async concurrency and I/O" description = "A friendly Python library for async concurrency and I/O"
optional = false optional = false
python-versions = ">=3.10" python-versions = ">=3.10"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "trio-0.33.0-py3-none-any.whl", hash = "sha256:3bd5d87f781d9b0192d592aef28691f8951d6c2e41b7e1da4c25cde6c180ae9b"}, {file = "trio-0.33.0-py3-none-any.whl", hash = "sha256:3bd5d87f781d9b0192d592aef28691f8951d6c2e41b7e1da4c25cde6c180ae9b"},
{file = "trio-0.33.0.tar.gz", hash = "sha256:a29b92b73f09d4b48ed249acd91073281a7f1063f09caba5dc70465b5c7aa970"}, {file = "trio-0.33.0.tar.gz", hash = "sha256:a29b92b73f09d4b48ed249acd91073281a7f1063f09caba5dc70465b5c7aa970"},
] ]
markers = {main = "extra == \"dev\""}
[package.dependencies] [package.dependencies]
attrs = ">=23.2.0" attrs = ">=23.2.0"
@@ -7646,11 +7674,12 @@ version = "0.12.2"
description = "WebSocket library for Trio" description = "WebSocket library for Trio"
optional = false optional = false
python-versions = ">=3.8" python-versions = ">=3.8"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "trio_websocket-0.12.2-py3-none-any.whl", hash = "sha256:df605665f1db533f4a386c94525870851096a223adcb97f72a07e8b4beba45b6"}, {file = "trio_websocket-0.12.2-py3-none-any.whl", hash = "sha256:df605665f1db533f4a386c94525870851096a223adcb97f72a07e8b4beba45b6"},
{file = "trio_websocket-0.12.2.tar.gz", hash = "sha256:22c72c436f3d1e264d0910a3951934798dcc5b00ae56fc4ee079d46c7cf20fae"}, {file = "trio_websocket-0.12.2.tar.gz", hash = "sha256:22c72c436f3d1e264d0910a3951934798dcc5b00ae56fc4ee079d46c7cf20fae"},
] ]
markers = {main = "extra == \"dev\""}
[package.dependencies] [package.dependencies]
outcome = ">=1.2.0" outcome = ">=1.2.0"
@@ -7983,11 +8012,12 @@ version = "1.9.0"
description = "WebSocket client for Python with low level API options" description = "WebSocket client for Python with low level API options"
optional = false optional = false
python-versions = ">=3.9" python-versions = ">=3.9"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef"}, {file = "websocket_client-1.9.0-py3-none-any.whl", hash = "sha256:af248a825037ef591efbf6ed20cc5faa03d3b47b9e5a2230a529eeee1c1fc3ef"},
{file = "websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98"}, {file = "websocket_client-1.9.0.tar.gz", hash = "sha256:9e813624b6eb619999a97dc7958469217c3176312b3a16a4bd1bc7e08a46ec98"},
] ]
markers = {main = "extra == \"dev\""}
[package.extras] [package.extras]
docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx_rtd_theme (>=1.1.0)"] docs = ["Sphinx (>=6.0)", "myst-parser (>=2.0.0)", "sphinx_rtd_theme (>=1.1.0)"]
@@ -8071,11 +8101,12 @@ version = "1.3.2"
description = "Pure-Python WebSocket protocol implementation" description = "Pure-Python WebSocket protocol implementation"
optional = false optional = false
python-versions = ">=3.10" python-versions = ">=3.10"
groups = ["dev"] groups = ["main", "dev"]
files = [ files = [
{file = "wsproto-1.3.2-py3-none-any.whl", hash = "sha256:61eea322cdf56e8cc904bd3ad7573359a242ba65688716b0710a5eb12beab584"}, {file = "wsproto-1.3.2-py3-none-any.whl", hash = "sha256:61eea322cdf56e8cc904bd3ad7573359a242ba65688716b0710a5eb12beab584"},
{file = "wsproto-1.3.2.tar.gz", hash = "sha256:b86885dcf294e15204919950f666e06ffc6c7c114ca900b060d6e16293528294"}, {file = "wsproto-1.3.2.tar.gz", hash = "sha256:b86885dcf294e15204919950f666e06ffc6c7c114ca900b060d6e16293528294"},
] ]
markers = {main = "extra == \"dev\""}
[package.dependencies] [package.dependencies]
h11 = ">=0.16.0,<1" h11 = ">=0.16.0,<1"
@@ -8228,6 +8259,7 @@ propcache = ">=0.2.1"
[extras] [extras]
bigbrain = ["airllm"] bigbrain = ["airllm"]
dev = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-randomly", "pytest-timeout", "selenium"]
discord = ["discord.py"] discord = ["discord.py"]
swarm = ["redis"] swarm = ["redis"]
telegram = ["python-telegram-bot"] telegram = ["python-telegram-bot"]
@@ -8236,4 +8268,4 @@ voice = ["pyttsx3"]
[metadata] [metadata]
lock-version = "2.1" lock-version = "2.1"
python-versions = ">=3.11,<4" python-versions = ">=3.11,<4"
content-hash = "546e3cc56929a6b988223fbc685fdb61468fbe5a50249be624742edca30f137e" content-hash = "8e608d71fafb99eda990a90f7879127522ec03fcd2bd34b115d2b4fde4c0fe87"

View File

@@ -54,6 +54,7 @@ pytest-asyncio = { version = ">=0.24.0", optional = true }
pytest-cov = { version = ">=5.0.0", optional = true } pytest-cov = { version = ">=5.0.0", optional = true }
pytest-timeout = { version = ">=2.3.0", optional = true } pytest-timeout = { version = ">=2.3.0", optional = true }
selenium = { version = ">=4.20.0", optional = true } selenium = { version = ">=4.20.0", optional = true }
pytest-randomly = { version = ">=3.16.0", optional = true }
[tool.poetry.extras] [tool.poetry.extras]
swarm = ["redis"] swarm = ["redis"]
@@ -61,7 +62,7 @@ telegram = ["python-telegram-bot"]
discord = ["discord.py"] discord = ["discord.py"]
bigbrain = ["airllm"] bigbrain = ["airllm"]
voice = ["pyttsx3"] voice = ["pyttsx3"]
dev = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-timeout", "selenium"] dev = ["pytest", "pytest-asyncio", "pytest-cov", "pytest-timeout", "pytest-randomly", "selenium"]
[tool.poetry.group.dev.dependencies] [tool.poetry.group.dev.dependencies]
pytest = ">=8.0.0" pytest = ">=8.0.0"
@@ -69,6 +70,7 @@ pytest-asyncio = ">=0.24.0"
pytest-cov = ">=5.0.0" pytest-cov = ">=5.0.0"
pytest-timeout = ">=2.3.0" pytest-timeout = ">=2.3.0"
selenium = ">=4.20.0" selenium = ">=4.20.0"
pytest-randomly = "^4.0.1"
[tool.poetry.scripts] [tool.poetry.scripts]
timmy = "timmy.cli:main" timmy = "timmy.cli:main"
@@ -85,9 +87,15 @@ addopts = "-v --tb=short --timeout=30"
markers = [ markers = [
"unit: Unit tests (fast, no I/O)", "unit: Unit tests (fast, no I/O)",
"integration: Integration tests (may use SQLite)", "integration: Integration tests (may use SQLite)",
"functional: Functional tests (real HTTP requests, no mocking)",
"e2e: End-to-end tests (full system, may be slow)",
"dashboard: Dashboard route tests", "dashboard: Dashboard route tests",
"swarm: Swarm coordinator tests", "swarm: Swarm coordinator tests",
"slow: Tests that take >1 second", "slow: Tests that take >1 second",
"selenium: Requires Selenium and Chrome (browser automation)",
"docker: Requires Docker and docker-compose",
"ollama: Requires Ollama service running",
"skip_ci: Skip in CI environment (local development only)",
] ]
[tool.coverage.run] [tool.coverage.run]

View File

@@ -186,6 +186,32 @@ def db_connection():
# ── Additional Clean Test Fixtures ────────────────────────────────────────── # ── Additional Clean Test Fixtures ──────────────────────────────────────────
@pytest.fixture(autouse=True)
def tmp_swarm_db(tmp_path, monkeypatch):
"""Point all swarm SQLite paths to a temp directory for test isolation.
This is the single source of truth — individual test files should NOT
redefine this fixture. All eight swarm modules that carry a module-level
DB_PATH are patched here so every test gets a clean, ephemeral database.
"""
db_path = tmp_path / "swarm.db"
for module in [
"swarm.tasks",
"swarm.registry",
"swarm.stats",
"swarm.learner",
"swarm.routing",
"swarm.event_log",
"swarm.task_queue.models",
"swarm.work_orders.models",
]:
try:
monkeypatch.setattr(f"{module}.DB_PATH", db_path)
except AttributeError:
pass # Module may not be importable in minimal test envs
yield db_path
@pytest.fixture @pytest.fixture
def mock_ollama_client(): def mock_ollama_client():
"""Provide a mock Ollama client for unit tests.""" """Provide a mock Ollama client for unit tests."""

View File

@@ -7,16 +7,6 @@ import pytest
from fastapi.testclient import TestClient from fastapi.testclient import TestClient
@pytest.fixture(autouse=True)
def tmp_swarm_db(tmp_path, monkeypatch):
"""Point swarm SQLite to a temp directory for test isolation."""
db_path = tmp_path / "swarm.db"
monkeypatch.setattr("swarm.tasks.DB_PATH", db_path)
monkeypatch.setattr("swarm.registry.DB_PATH", db_path)
monkeypatch.setattr("swarm.stats.DB_PATH", db_path)
yield db_path
@pytest.fixture @pytest.fixture
def client(): def client():
from dashboard.app import app from dashboard.app import app

View File

@@ -1,175 +1,120 @@
"""End-to-end tests for Docker deployment. """End-to-end tests for Docker deployment.
These tests verify that the Dockerized application starts correctly, These tests verify that Dockerfiles and compose configs are present,
responds to requests, and all services are properly orchestrated. syntactically valid, and declare the expected services and settings.
""" """
import pytest
import subprocess
import time
import requests
import json import json
import subprocess
import pytest
from pathlib import Path from pathlib import Path
PROJECT_ROOT = Path(__file__).parent.parent.parent
@pytest.fixture(scope="module")
def docker_compose_file():
"""Return the path to the docker-compose file."""
return Path(__file__).parent.parent.parent / "docker-compose.enhanced.yml"
@pytest.fixture(scope="module")
def docker_services_running(docker_compose_file):
"""Start Docker services for testing."""
if not docker_compose_file.exists():
pytest.skip("docker-compose.enhanced.yml not found")
# Start services
result = subprocess.run(
["docker", "compose", "-f", str(docker_compose_file), "up", "-d"],
capture_output=True,
text=True,
)
if result.returncode != 0:
pytest.skip(f"Failed to start Docker services: {result.stderr}")
# Wait for services to be ready
time.sleep(10)
yield
# Cleanup
subprocess.run(
["docker", "compose", "-f", str(docker_compose_file), "down"],
capture_output=True,
)
@pytest.mark.skipif( @pytest.mark.skipif(
subprocess.run(["which", "docker"], capture_output=True).returncode != 0, subprocess.run(["which", "docker"], capture_output=True).returncode != 0,
reason="Docker not installed" reason="Docker not installed",
) )
def test_docker_compose_file_exists(): class TestDockerComposeFiles:
"""Test that docker-compose.enhanced.yml exists.""" """Validate that all compose files exist and parse cleanly."""
compose_file = Path(__file__).parent.parent.parent / "docker-compose.enhanced.yml"
assert compose_file.exists(), "docker-compose.enhanced.yml should exist" def test_base_compose_exists(self):
assert (PROJECT_ROOT / "docker-compose.yml").exists()
def test_dev_overlay_exists(self):
assert (PROJECT_ROOT / "docker-compose.dev.yml").exists()
def test_prod_compose_exists(self):
assert (PROJECT_ROOT / "docker-compose.prod.yml").exists()
def test_test_compose_exists(self):
assert (PROJECT_ROOT / "docker-compose.test.yml").exists()
def test_microservices_compose_exists(self):
assert (PROJECT_ROOT / "docker-compose.microservices.yml").exists()
def test_base_compose_syntax(self):
result = subprocess.run(
["docker", "compose", "-f", str(PROJECT_ROOT / "docker-compose.yml"), "config"],
capture_output=True,
text=True,
)
assert result.returncode == 0, f"Docker Compose syntax error: {result.stderr}"
def test_microservices_compose_services_defined(self):
result = subprocess.run(
[
"docker", "compose",
"-f", str(PROJECT_ROOT / "docker-compose.microservices.yml"),
"config", "--format", "json",
],
capture_output=True,
text=True,
)
assert result.returncode == 0, f"Config error: {result.stderr}"
config = json.loads(result.stdout)
services = config.get("services", {})
assert "ollama" in services, "ollama service should be defined"
assert "dashboard" in services, "dashboard service should be defined"
assert "timmy" in services, "timmy service should be defined"
def test_microservices_compose_content(self):
content = (PROJECT_ROOT / "docker-compose.microservices.yml").read_text()
assert "ollama" in content
assert "dashboard" in content
assert "timmy" in content
assert "timmy-net" in content
assert "ollama-data" in content
assert "timmy-data" in content
def test_test_compose_has_test_runner(self):
content = (PROJECT_ROOT / "docker-compose.test.yml").read_text()
assert "test:" in content, "Test compose should define a 'test' service"
assert "TIMMY_TEST_MODE" in content
assert "pytest" in content
@pytest.mark.skipif( class TestDockerfiles:
subprocess.run(["which", "docker"], capture_output=True).returncode != 0, """Validate the primary Dockerfile and specialised images."""
reason="Docker not installed"
) def test_dockerfile_exists(self):
def test_docker_compose_syntax(): assert (PROJECT_ROOT / "Dockerfile").exists()
"""Test that docker-compose file has valid syntax."""
compose_file = Path(__file__).parent.parent.parent / "docker-compose.enhanced.yml" def test_dockerfile_ollama_exists(self):
assert (PROJECT_ROOT / "docker" / "Dockerfile.ollama").exists()
result = subprocess.run(
["docker", "compose", "-f", str(compose_file), "config"], def test_dockerfile_agent_exists(self):
capture_output=True, assert (PROJECT_ROOT / "docker" / "Dockerfile.agent").exists()
text=True,
def test_dockerfile_dashboard_exists(self):
assert (PROJECT_ROOT / "docker" / "Dockerfile.dashboard").exists()
def test_dockerfile_test_exists(self):
assert (PROJECT_ROOT / "docker" / "Dockerfile.test").exists()
def test_dockerfile_health_check(self):
content = (PROJECT_ROOT / "Dockerfile").read_text()
assert "HEALTHCHECK" in content, "Dockerfile should include HEALTHCHECK"
assert "/health" in content
def test_dockerfile_non_root_user(self):
content = (PROJECT_ROOT / "Dockerfile").read_text()
assert "USER timmy" in content
assert "groupadd -r timmy" in content
@pytest.mark.skipif(
subprocess.run(["which", "docker"], capture_output=True).returncode != 0,
reason="Docker not installed",
) )
def test_docker_image_build(self):
assert result.returncode == 0, f"Docker Compose syntax error: {result.stderr}" result = subprocess.run(
["docker", "build", "-t", "timmy-time:test", "."],
cwd=PROJECT_ROOT,
@pytest.mark.skipif( capture_output=True,
subprocess.run(["which", "docker"], capture_output=True).returncode != 0, text=True,
reason="Docker not installed" timeout=300,
) )
def test_dockerfile_exists(): if result.returncode != 0:
"""Test that Dockerfile exists.""" pytest.skip(f"Docker build failed: {result.stderr}")
dockerfile = Path(__file__).parent.parent.parent / "Dockerfile"
assert dockerfile.exists(), "Dockerfile should exist"
@pytest.mark.skipif(
subprocess.run(["which", "docker"], capture_output=True).returncode != 0,
reason="Docker not installed"
)
def test_dockerfile_ollama_exists():
"""Test that Dockerfile.ollama exists."""
dockerfile = Path(__file__).parent.parent.parent / "Dockerfile.ollama"
assert dockerfile.exists(), "Dockerfile.ollama should exist"
@pytest.mark.skipif(
subprocess.run(["which", "docker"], capture_output=True).returncode != 0,
reason="Docker not installed"
)
def test_docker_image_build():
"""Test that the Docker image can be built."""
result = subprocess.run(
["docker", "build", "-t", "timmy-time:test", "."],
cwd=Path(__file__).parent.parent.parent,
capture_output=True,
text=True,
timeout=300,
)
# Don't fail if build fails, just skip
if result.returncode != 0:
pytest.skip(f"Docker build failed: {result.stderr}")
@pytest.mark.skipif(
subprocess.run(["which", "docker"], capture_output=True, shell=True).returncode != 0,
reason="Docker not installed"
)
def test_docker_compose_services_defined():
"""Test that docker-compose defines all required services."""
compose_file = Path(__file__).parent.parent.parent / "docker-compose.enhanced.yml"
result = subprocess.run(
["docker", "compose", "-f", str(compose_file), "config"],
capture_output=True,
text=True,
)
assert result.returncode == 0, "Docker Compose config should be valid"
config = json.loads(result.stdout)
services = config.get("services", {})
# Check for required services
assert "ollama" in services, "ollama service should be defined"
assert "dashboard" in services, "dashboard service should be defined"
assert "timmy" in services, "timmy service should be defined"
def test_docker_compose_enhanced_yml_content():
"""Test that docker-compose.enhanced.yml has correct configuration."""
compose_file = Path(__file__).parent.parent.parent / "docker-compose.enhanced.yml"
with open(compose_file) as f:
content = f.read()
# Check for key configurations
assert "ollama" in content, "Should reference ollama service"
assert "dashboard" in content, "Should reference dashboard service"
assert "timmy" in content, "Should reference timmy agent"
assert "swarm-net" in content, "Should define swarm network"
assert "ollama-data" in content, "Should define ollama-data volume"
assert "timmy-data" in content, "Should define timmy-data volume"
def test_dockerfile_health_check():
"""Test that Dockerfile includes health check."""
dockerfile = Path(__file__).parent.parent.parent / "Dockerfile"
with open(dockerfile) as f:
content = f.read()
assert "HEALTHCHECK" in content, "Dockerfile should include HEALTHCHECK"
assert "/health" in content, "Health check should use /health endpoint"
def test_dockerfile_non_root_user():
"""Test that Dockerfile runs as non-root user."""
dockerfile = Path(__file__).parent.parent.parent / "Dockerfile"
with open(dockerfile) as f:
content = f.read()
assert "USER timmy" in content, "Dockerfile should run as non-root user"
assert "groupadd -r timmy" in content, "Dockerfile should create timmy user"

View File

@@ -8,17 +8,6 @@ import pytest
from unittest.mock import AsyncMock, patch from unittest.mock import AsyncMock, patch
@pytest.fixture(autouse=True)
def tmp_swarm_db(tmp_path, monkeypatch):
"""Point swarm SQLite to a temp directory for test isolation."""
db_path = tmp_path / "swarm.db"
monkeypatch.setattr("swarm.tasks.DB_PATH", db_path)
monkeypatch.setattr("swarm.registry.DB_PATH", db_path)
monkeypatch.setattr("swarm.stats.DB_PATH", db_path)
monkeypatch.setattr("swarm.learner.DB_PATH", db_path)
yield db_path
# ── Coordinator: Agent lifecycle ───────────────────────────────────────────── # ── Coordinator: Agent lifecycle ─────────────────────────────────────────────
def test_coordinator_spawn_agent(): def test_coordinator_spawn_agent():

View File

@@ -10,14 +10,6 @@ import pytest
# ── Tasks CRUD ─────────────────────────────────────────────────────────────── # ── Tasks CRUD ───────────────────────────────────────────────────────────────
@pytest.fixture(autouse=True)
def tmp_swarm_db(tmp_path, monkeypatch):
"""Point swarm SQLite to a temp directory for test isolation."""
db_path = tmp_path / "swarm.db"
monkeypatch.setattr("swarm.tasks.DB_PATH", db_path)
monkeypatch.setattr("swarm.registry.DB_PATH", db_path)
yield db_path
def test_create_task(): def test_create_task():
from swarm.tasks import create_task from swarm.tasks import create_task

View File

@@ -7,15 +7,6 @@ import pytest
from unittest.mock import MagicMock, patch from unittest.mock import MagicMock, patch
@pytest.fixture(autouse=True)
def tmp_swarm_db(tmp_path, monkeypatch):
"""Point swarm SQLite to a temp directory for test isolation."""
db_path = tmp_path / "swarm.db"
monkeypatch.setattr("swarm.tasks.DB_PATH", db_path)
monkeypatch.setattr("swarm.registry.DB_PATH", db_path)
yield db_path
def _make_node(agent_id="node-1", name="TestNode"): def _make_node(agent_id="node-1", name="TestNode"):
from swarm.comms import SwarmComms from swarm.comms import SwarmComms
from swarm.swarm_node import SwarmNode from swarm.swarm_node import SwarmNode

View File

@@ -4,18 +4,6 @@ import pytest
from unittest.mock import MagicMock from unittest.mock import MagicMock
# ── Fixture: redirect SQLite DB to a temp directory ──────────────────────────
@pytest.fixture(autouse=True)
def tmp_swarm_db(tmp_path, monkeypatch):
db_path = tmp_path / "swarm.db"
monkeypatch.setattr("swarm.tasks.DB_PATH", db_path)
monkeypatch.setattr("swarm.registry.DB_PATH", db_path)
monkeypatch.setattr("swarm.stats.DB_PATH", db_path)
monkeypatch.setattr("swarm.learner.DB_PATH", db_path)
yield db_path
# ── personas.py ─────────────────────────────────────────────────────────────── # ── personas.py ───────────────────────────────────────────────────────────────
def test_all_nine_personas_defined(): def test_all_nine_personas_defined():

View File

@@ -3,16 +3,6 @@
import pytest import pytest
@pytest.fixture(autouse=True)
def tmp_swarm_db(tmp_path, monkeypatch):
"""Isolate SQLite writes to a temp directory."""
db = tmp_path / "swarm.db"
monkeypatch.setattr("swarm.tasks.DB_PATH", db)
monkeypatch.setattr("swarm.registry.DB_PATH", db)
monkeypatch.setattr("swarm.stats.DB_PATH", db)
yield db
# ── reconcile_on_startup: return shape ─────────────────────────────────────── # ── reconcile_on_startup: return shape ───────────────────────────────────────
def test_reconcile_returns_summary_keys(): def test_reconcile_returns_summary_keys():

View File

@@ -3,13 +3,6 @@
import pytest import pytest
@pytest.fixture(autouse=True)
def tmp_swarm_db(tmp_path, monkeypatch):
db_path = tmp_path / "swarm.db"
monkeypatch.setattr("swarm.stats.DB_PATH", db_path)
yield db_path
# ── record_bid ──────────────────────────────────────────────────────────────── # ── record_bid ────────────────────────────────────────────────────────────────
def test_record_bid_returns_id(): def test_record_bid_returns_id():

View File

@@ -11,14 +11,6 @@ from unittest.mock import AsyncMock, MagicMock, patch
import pytest import pytest
@pytest.fixture(autouse=True)
def tmp_swarm_db(tmp_path, monkeypatch):
db_path = tmp_path / "swarm.db"
monkeypatch.setattr("swarm.tasks.DB_PATH", db_path)
monkeypatch.setattr("swarm.registry.DB_PATH", db_path)
yield db_path
def test_agent_runner_module_is_importable(): def test_agent_runner_module_is_importable():
"""The agent_runner module should import without errors.""" """The agent_runner module should import without errors."""
import swarm.agent_runner import swarm.agent_runner