intelligence(deepdive): Docker deployment scaffold for #830
Some checks failed
Deploy Nexus / deploy (push) Has been cancelled

- Add Dockerfile for production containerized pipeline
- Add docker-compose.yml for full stack deployment
- Add .dockerignore for clean builds
- Add deploy.sh: one-command build, test, and systemd timer install

This provides a sovereign, reproducible deployment path for the
Deep Dive daily briefing pipeline.
This commit is contained in:
Ezra (Archivist)
2026-04-05 20:40:29 +00:00
parent 2daedfb2a0
commit d2f103654f
4 changed files with 250 additions and 0 deletions

View File

@@ -0,0 +1,30 @@
# Deep Dive Docker Ignore — keep the build context small and secret-free.

# Python bytecode / build artifacts
__pycache__/
*.pyc
*.pyo
*.pyd
.Python
*.so
*.egg
*.egg-info/
dist/
build/

# Tool caches and coverage output (.cache/ also covers .cache/deepdive/)
.cache/
.pytest_cache/
.mypy_cache/
.coverage
htmlcov/

# Local secrets and virtualenvs — never send these to the daemon
.env
.venv/
venv/

# Logs and generated pipeline outputs
*.log
output/
audio/
*.mp3
*.wav
*.ogg

# VCS and CI metadata
.git/
.gitignore
.github/
.gitea/

# Docker/deploy files are never COPY'd by the Dockerfile; excluding them
# avoids needless cache invalidation when only deployment tooling changes.
Dockerfile
docker-compose.yml
.dockerignore
deploy.sh

View File

@@ -0,0 +1,42 @@
# syntax=docker/dockerfile:1
# Deep Dive Intelligence Pipeline — Production Container
# Issue: #830 — Sovereign NotebookLM Daily Briefing
#
# Build:
#   docker build -t deepdive:latest .
# Run dry-run:
#   docker run --rm -v $(pwd)/config.yaml:/app/config.yaml deepdive:latest --dry-run
FROM python:3.11-slim

# System dependencies: ffmpeg for audio handling, git for VCS pip installs,
# curl/wget/ca-certificates for downloads. Sorted alphabetically; cache
# cleaned in the same layer so the apt lists never reach the image.
RUN apt-get update && apt-get install -y --no-install-recommends \
    ca-certificates \
    curl \
    ffmpeg \
    git \
    wget \
    && rm -rf /var/lib/apt/lists/*

WORKDIR /app

# Install Python dependencies first (layer caching)
COPY requirements.txt .
RUN pip install --no-cache-dir -r requirements.txt

# Pre-download embedding model for faster cold starts.
# Pin the model cache under /app (instead of the default ~/.cache in /root)
# so it stays readable once we drop privileges with USER below.
ENV HF_HOME=/app/.hf-cache \
    SENTENCE_TRANSFORMERS_HOME=/app/.hf-cache
RUN python3 -c "from sentence_transformers import SentenceTransformer; SentenceTransformer('all-MiniLM-L6-v2')"

# Copy application code
COPY pipeline.py tts_engine.py fleet_context.py telegram_command.py quality_eval.py ./
COPY prompts/ ./prompts/
COPY tests/ ./tests/
COPY Makefile README.md QUICKSTART.md OPERATIONAL_READINESS.md ./

# Create cache/output directories and a dedicated non-root runtime user;
# chown so named volumes mounted here inherit writable ownership.
RUN groupadd --system deepdive \
    && useradd --system --gid deepdive --home /app deepdive \
    && mkdir -p /app/cache /app/output \
    && chown -R deepdive:deepdive /app
ENV DEEPDIVE_CACHE_DIR=/app/cache \
    PYTHONUNBUFFERED=1

# Least privilege: never run the pipeline as root.
USER deepdive

# Default: run pipeline with mounted config; CMD supplies overridable default args
ENTRYPOINT ["python3", "pipeline.py", "--config", "/app/config.yaml"]
CMD ["--dry-run"]

124
intelligence/deepdive/deploy.sh Executable file
View File

@@ -0,0 +1,124 @@
#!/usr/bin/env bash
# deploy.sh — One-command Deep Dive deployment
# Issue: #830 — Sovereign NotebookLM Daily Briefing
#
# Usage:
#   ./deploy.sh --dry-run   # Build + test only
#   ./deploy.sh --live      # Build + install daily timer
set -euo pipefail

SCRIPT_DIR="$(cd "$(dirname "${BASH_SOURCE[0]}")" && pwd)"
COMPOSE_FILE="$SCRIPT_DIR/docker-compose.yml"
MODE="dry-run"

RED='\033[0;31m'
GREEN='\033[0;32m'
YELLOW='\033[1;33m'
NC='\033[0m'

pass() { echo -e "${GREEN}[PASS]${NC} $*"; }
fail() { echo -e "${RED}[FAIL]${NC} $*"; }
info() { echo -e "${YELLOW}[INFO]${NC} $*"; }

usage() {
    echo "Usage: $0 [--dry-run | --live]"
    echo "  --dry-run   Build image and run a dry-run test (default)"
    echo "  --live      Build image, run test, and install systemd timer"
    exit 1
}

if [[ $# -gt 0 ]]; then
    case "$1" in
        --dry-run) MODE="dry-run" ;;
        --live)    MODE="live" ;;
        -h|--help) usage ;;
        *)         usage ;;
    esac
fi

info "=================================================="
info "Deep Dive Deployment — Issue #830"
info "Mode: $MODE"
info "=================================================="

# --- Prerequisites ---
info "Checking prerequisites..."

if ! command -v docker >/dev/null 2>&1; then
    fail "Docker is not installed"
    exit 1
fi
pass "Docker installed"

# Resolve the Compose invocation ONCE and reuse it everywhere. The original
# accepted either the v2 plugin (`docker compose`) or legacy `docker-compose`
# during the check but then hard-coded `docker compose`, breaking legacy hosts.
# Absolute paths are used so the generated systemd ExecStart= is valid.
COMPOSE=()
if docker compose version >/dev/null 2>&1; then
    COMPOSE=("$(command -v docker)" compose)
elif command -v docker-compose >/dev/null 2>&1; then
    COMPOSE=("$(command -v docker-compose)")
else
    fail "Docker Compose is not installed"
    exit 1
fi
pass "Docker Compose installed"

if [[ ! -f "$SCRIPT_DIR/config.yaml" ]]; then
    fail "config.yaml not found in $SCRIPT_DIR"
    info "Copy config.yaml.example or create one before deploying."
    exit 1
fi
pass "config.yaml exists"

# --- Build ---
info "Building Deep Dive image..."
cd "$SCRIPT_DIR"
"${COMPOSE[@]}" -f "$COMPOSE_FILE" build deepdive
pass "Image built successfully"

# --- Dry-run test ---
info "Running dry-run pipeline test..."
"${COMPOSE[@]}" -f "$COMPOSE_FILE" run --rm deepdive --dry-run --since 48
pass "Dry-run test passed"

# --- Live mode: install timer ---
if [[ "$MODE" == "live" ]]; then
    info "Installing daily execution timer..."
    SYSTEMD_DIR="$HOME/.config/systemd/user"
    mkdir -p "$SYSTEMD_DIR"

    # Generate a service that runs via the resolved Compose command.
    # ${COMPOSE[*]} expands to the absolute binary path (plus optional
    # "compose" subcommand), which systemd requires for ExecStart=.
    cat > "$SYSTEMD_DIR/deepdive.service" <<EOF
[Unit]
Description=Deep Dive Daily Intelligence Briefing
After=docker.service

[Service]
Type=oneshot
WorkingDirectory=$SCRIPT_DIR
ExecStart=${COMPOSE[*]} -f $COMPOSE_FILE run --rm deepdive --today
EOF

    cat > "$SYSTEMD_DIR/deepdive.timer" <<EOF
[Unit]
Description=Run Deep Dive daily at 06:00

[Timer]
OnCalendar=*-*-* 06:00:00
Persistent=true

[Install]
WantedBy=timers.target
EOF

    systemctl --user daemon-reload
    systemctl --user enable deepdive.timer
    systemctl --user start deepdive.timer || true
    pass "Systemd timer installed and started"
    info "Check status: systemctl --user status deepdive.timer"
    # User timers only fire while a session exists unless lingering is on.
    info "To run without an active login session: loginctl enable-linger $USER"
    info "=================================================="
    info "Deep Dive is now deployed for live delivery!"
    info "=================================================="
else
    info "=================================================="
    info "Deployment test successful."
    info "Run './deploy.sh --live' to enable daily automation."
    info "=================================================="
fi

View File

@@ -0,0 +1,54 @@
# Deep Dive — Full Containerized Deployment
# Issue: #830 — Sovereign NotebookLM Daily Briefing
#
# Usage:
#   docker compose up -d                          # Start stack
#   docker compose run --rm deepdive --dry-run    # Test pipeline
#   docker compose run --rm deepdive --today      # Live run
#
# For daily automation, use systemd timer or host cron calling:
#   docker compose -f /path/to/docker-compose.yml run --rm deepdive --today

services:
  deepdive:
    build:
      context: .
      dockerfile: Dockerfile
    container_name: deepdive
    image: deepdive:latest
    volumes:
      # Mount your config from host (read-only so the container can't alter it)
      - ./config.yaml:/app/config.yaml:ro
      # Persist cache and outputs in named volumes across runs
      - deepdive-cache:/app/cache
      - deepdive-output:/app/output
    environment:
      # Values are interpolated from the host environment (or a .env file
      # beside this compose file); the ':-' default leaves unset keys empty
      # instead of failing interpolation. Secrets are never baked into the image.
      - OPENAI_API_KEY=${OPENAI_API_KEY:-}
      - ANTHROPIC_API_KEY=${ANTHROPIC_API_KEY:-}
      - ELEVENLABS_API_KEY=${ELEVENLABS_API_KEY:-}
      - TELEGRAM_BOT_TOKEN=${TELEGRAM_BOT_TOKEN:-}
      - TELEGRAM_HOME_CHANNEL=${TELEGRAM_HOME_CHANNEL:-}
      - DEEPDIVE_CACHE_DIR=/app/cache
    command: ["--dry-run"]
    # Optional: attach to Ollama for local LLM inference
    # networks:
    #   - deepdive-net

  # Optional: Local LLM backend (uncomment if using local inference)
  # ollama:
  #   image: ollama/ollama:latest
  #   container_name: deepdive-ollama
  #   volumes:
  #     - ollama-models:/root/.ollama
  #   ports:
  #     - "11434:11434"
  #   networks:
  #     - deepdive-net

volumes:
  deepdive-cache:
  deepdive-output:
  # ollama-models:

# networks:
#   deepdive-net: