Adds the `bigbrain` optional dependency group (airllm>=2.9.0) and a
complete second inference path that runs 8B / 70B / 405B Llama models
locally via layer-by-layer loading — no GPU required, no cloud, fully
sovereign.
Key changes:
- src/timmy/backends.py — TimmyAirLLMAgent (same print_response interface
as Agno Agent); auto-selects AirLLMMLX on Apple
Silicon, AutoModel (PyTorch) everywhere else
- src/timmy/agent.py — _resolve_backend() routing with explicit override,
env-config, and 'auto' Apple-Silicon detection
- src/timmy/cli.py — --backend / --model-size flags on all commands
- src/config.py — timmy_model_backend + airllm_model_size settings
- src/timmy/prompts.py — mentions AirLLM "even bigger brains, still fully
sovereign"
- pyproject.toml — bigbrain optional dep; wheel include list updated
- .env.example — TIMMY_MODEL_BACKEND + AIRLLM_MODEL_SIZE docs
- tests/conftest.py — stubs 'airllm' module so tests run without GPU
- tests/test_backends.py — 13 new tests covering helpers + TimmyAirLLMAgent
- tests/test_agent.py — 7 new tests for backend routing
- README.md — Big Brain section with one-line install
- activate_self_tdd.sh — bootstrap script (venv + install + tests +
watchdog + dashboard); --big-brain flag
All 61 tests pass. Self-TDD watchdog unaffected.
https://claude.ai/code/session_01DMjQ5qMZ8iHeyix1j3GS7c
55 lines · 1.2 KiB · TOML
[build-system]
requires = ["hatchling"]
build-backend = "hatchling.build"

[project]
name = "timmy-time"
version = "1.0.0"
description = "Mission Control for sovereign AI agents"
readme = "README.md"
requires-python = ">=3.11"
license = { text = "MIT" }
dependencies = [
    "agno>=1.4.0",
    "fastapi>=0.115.0",
    "uvicorn[standard]>=0.32.0",
    "jinja2>=3.1.0",
    "httpx>=0.27.0",
    "python-multipart>=0.0.12",
    "aiofiles>=24.0.0",
    "typer>=0.12.0",
    "rich>=13.0.0",
    "pydantic-settings>=2.0.0",
]

[project.optional-dependencies]
dev = [
    "pytest>=8.0.0",
    "pytest-asyncio>=0.24.0",
    "pytest-cov>=5.0.0",
]
# Big-brain: run 8B / 70B / 405B models locally via layer-by-layer loading.
# pip install ".[bigbrain]"
# On Apple Silicon: pip install "airllm[mlx]" for the MLX-accelerated backend.
bigbrain = [
    "airllm>=2.9.0",
]

[project.scripts]
timmy = "timmy.cli:main"
self-tdd = "self_tdd.watchdog:main"

[tool.hatch.build.targets.wheel]
sources = {"src" = ""}
include = ["src/timmy", "src/dashboard", "src/config.py", "src/self_tdd"]

[tool.pytest.ini_options]
testpaths = ["tests"]
pythonpath = ["src"]
asyncio_mode = "auto"
addopts = "-v --tb=short"

[tool.coverage.run]
source = ["src"]
omit = ["*/tests/*"]