diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 13714594..dfe03211 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -2,9 +2,9 @@ name: Tests
 
 on:
   push:
-    branches: ["**"]
+    branches: [main]
   pull_request:
-    branches: ["**"]
+    branches: [main]
 
 jobs:
   lint:
@@ -17,7 +17,7 @@ jobs:
          python-version: "3.11"
 
       - name: Install linters
-        run: pip install black==23.12.1 isort==5.13.2 bandit==1.7.5
+        run: pip install black==23.12.1 isort==5.13.2 bandit==1.8.0
 
       - name: Check formatting (black)
         run: black --check --line-length 100 src/ tests/
diff --git a/Dockerfile b/Dockerfile
index 1542baf7..0b13d656 100644
--- a/Dockerfile
+++ b/Dockerfile
@@ -28,7 +28,7 @@ COPY pyproject.toml poetry.lock ./
 
 # Install deps directly from lock file (no virtualenv, no export plugin needed)
 RUN poetry config virtualenvs.create false && \
-    poetry install --only main --extras telegram --extras discord --no-interaction
+    poetry install --only main --extras telegram --extras discord --no-root --no-interaction
 
 # ── Stage 2: Runtime ───────────────────────────────────────────────────────
 FROM python:3.12-slim AS base
diff --git a/scripts/pre-commit-hook.sh b/scripts/pre-commit-hook.sh
index c12e2d80..89a8d3bd 100755
--- a/scripts/pre-commit-hook.sh
+++ b/scripts/pre-commit-hook.sh
@@ -1,8 +1,13 @@
 #!/usr/bin/env bash
-# Pre-commit hook: run tests with a wall-clock limit.
-# Blocks the commit if tests fail or take too long.
+# Pre-commit hook: lint + test with a wall-clock limit.
+# Blocks the commit if formatting, imports, or tests fail.
 # Current baseline: ~18s wall-clock. Limit set to 30s for headroom.
 
+echo "Auto-formatting (black + isort)..."
+poetry run python -m black --line-length 100 src/ tests/ --quiet
+poetry run isort --profile black --line-length 100 src/ tests/ --quiet 2>/dev/null
+git add -u -- src/ tests/
+
 MAX_SECONDS=30
 echo "Running tests (${MAX_SECONDS}s limit)..."
 
diff --git a/src/dashboard/models/database.py b/src/dashboard/models/database.py
index 8a5b9143..03b972de 100644
--- a/src/dashboard/models/database.py
+++ b/src/dashboard/models/database.py
@@ -1,9 +1,18 @@
+import logging
+from pathlib import Path
+
 from sqlalchemy import create_engine
+from sqlalchemy.exc import OperationalError
 from sqlalchemy.ext.declarative import declarative_base
 from sqlalchemy.orm import Session, sessionmaker
 
+logger = logging.getLogger(__name__)
+
 SQLALCHEMY_DATABASE_URL = "sqlite:///./data/timmy_calm.db"
 
+# Ensure the data directory exists before creating the engine
+Path("./data").mkdir(parents=True, exist_ok=True)
+
 engine = create_engine(SQLALCHEMY_DATABASE_URL, connect_args={"check_same_thread": False})
 SessionLocal = sessionmaker(autocommit=False, autoflush=False, bind=engine)
 
@@ -12,8 +21,13 @@ Base = declarative_base()
 
 
 def create_tables():
-    """Create all tables defined by models that have imported Base."""
-    Base.metadata.create_all(bind=engine)
+    """Create all tables idempotently (safe under pytest-xdist concurrency)."""
+    try:
+        Base.metadata.create_all(bind=engine)
+    except OperationalError as exc:
+        # Only the create/create race is benign; anything else (locked DB,
+        # permissions, disk) must still surface to the caller.
+        if "already exists" not in str(exc):
+            raise
+        logger.debug("Table creation skipped (already exists): %s", exc)
 
 
 def get_db():
diff --git a/tests/dashboard/test_calm.py b/tests/dashboard/test_calm.py
index d353b1e4..0ba65bc7 100644
--- a/tests/dashboard/test_calm.py
+++ b/tests/dashboard/test_calm.py
@@ -235,3 +235,22 @@ def test_reorder_promote_later_to_next(client: TestClient, db_session: Session):
     assert db_session.query(Task).filter(Task.id == task_now.id).first().state == TaskState.NOW
     assert db_session.query(Task).filter(Task.id == task_later1.id).first().state == TaskState.NEXT
     assert db_session.query(Task).filter(Task.id == task_later2.id).first().state == TaskState.LATER
+
+
+def test_create_tables_idempotent_under_concurrency():
+    """Calling create_tables() when tables already exist must not crash.
+
+    This covers the race where multiple pytest-xdist workers (or app
+    processes) import the calm routes module simultaneously and each
+    calls create_tables() against the same SQLite file.
+    """
+    from unittest.mock import patch
+
+    from sqlalchemy.exc import OperationalError
+
+    from dashboard.models.database import create_tables
+
+    fake_error = OperationalError("CREATE TABLE", {}, Exception("table tasks already exists"))
+    with patch("dashboard.models.database.Base.metadata.create_all", side_effect=fake_error):
+        # Must not raise — the OperationalError is caught and logged
+        create_tables()