From 617ef43f99e34dec9287388e274c55895a728e2a Mon Sep 17 00:00:00 2001 From: Alexander Whitestone Date: Mon, 23 Mar 2026 17:57:26 -0400 Subject: [PATCH] =?UTF-8?q?test:=20add=20unit=20tests=20for=20daily=5Frun.?= =?UTF-8?q?py=20=E2=80=94=2051=20tests=20covering=20main=20handlers?= MIME-Version: 1.0 Content-Type: text/plain; charset=UTF-8 Content-Transfer-Encoding: 8bit Adds tests/dashboard/test_daily_run.py with 51 test cases covering: - _load_config(): defaults, file loading, env var overrides, invalid JSON - _get_token(): from config dict, from file, missing file - GiteaClient: headers, api_url, is_available (true/false/cached), get_paginated - LayerMetrics: trend and trend_color properties (all directions) - DailyRunMetrics: sessions_trend and sessions_trend_color properties - _extract_layer(): label extraction from issue label lists - _load_cycle_data(): success counting, invalid JSON lines, missing timestamps - _fetch_layer_metrics(): counting logic, graceful degradation on errors - _get_metrics(): unavailable client, happy path, exception handling - Route handlers: /daily-run/metrics (JSON) and /daily-run/panel (HTML) All 51 tests pass. tox -e unit remains green (293 passing). 
Fixes #1186 Co-Authored-By: Claude Sonnet 4.6 --- tests/dashboard/test_daily_run.py | 530 ++++++++++++++++++++++++++++++ 1 file changed, 530 insertions(+) create mode 100644 tests/dashboard/test_daily_run.py diff --git a/tests/dashboard/test_daily_run.py b/tests/dashboard/test_daily_run.py new file mode 100644 index 00000000..d010b502 --- /dev/null +++ b/tests/dashboard/test_daily_run.py @@ -0,0 +1,530 @@ +"""Unit tests for dashboard/routes/daily_run.py.""" + +from __future__ import annotations + +import json +import os +from datetime import UTC, datetime, timedelta +from pathlib import Path +from unittest.mock import MagicMock, patch +from urllib.error import HTTPError, URLError + +import pytest + +from dashboard.routes.daily_run import ( + DEFAULT_CONFIG, + LAYER_LABELS, + DailyRunMetrics, + GiteaClient, + LayerMetrics, + _extract_layer, + _fetch_layer_metrics, + _get_metrics, + _get_token, + _load_config, + _load_cycle_data, +) + + +# --------------------------------------------------------------------------- +# _load_config +# --------------------------------------------------------------------------- + + +def test_load_config_returns_defaults(): + with patch("dashboard.routes.daily_run.CONFIG_PATH") as mock_path: + mock_path.exists.return_value = False + config = _load_config() + assert config["gitea_api"] == DEFAULT_CONFIG["gitea_api"] + assert config["repo_slug"] == DEFAULT_CONFIG["repo_slug"] + + +def test_load_config_merges_file_orchestrator_section(tmp_path): + config_file = tmp_path / "daily_run.json" + config_file.write_text( + json.dumps({"orchestrator": {"repo_slug": "custom/repo", "gitea_api": "http://custom:3000/api/v1"}}) + ) + with patch("dashboard.routes.daily_run.CONFIG_PATH", config_file): + config = _load_config() + assert config["repo_slug"] == "custom/repo" + assert config["gitea_api"] == "http://custom:3000/api/v1" + + +def test_load_config_ignores_invalid_json(tmp_path): + config_file = tmp_path / "daily_run.json" + 
config_file.write_text("not valid json{{") + with patch("dashboard.routes.daily_run.CONFIG_PATH", config_file): + config = _load_config() + assert config["repo_slug"] == DEFAULT_CONFIG["repo_slug"] + + +def test_load_config_env_overrides(monkeypatch): + monkeypatch.setenv("TIMMY_GITEA_API", "http://envapi:3000/api/v1") + monkeypatch.setenv("TIMMY_REPO_SLUG", "env/repo") + monkeypatch.setenv("TIMMY_GITEA_TOKEN", "env-token-123") + with patch("dashboard.routes.daily_run.CONFIG_PATH") as mock_path: + mock_path.exists.return_value = False + config = _load_config() + assert config["gitea_api"] == "http://envapi:3000/api/v1" + assert config["repo_slug"] == "env/repo" + assert config["token"] == "env-token-123" + + +def test_load_config_no_env_overrides_without_vars(monkeypatch): + monkeypatch.delenv("TIMMY_GITEA_API", raising=False) + monkeypatch.delenv("TIMMY_REPO_SLUG", raising=False) + monkeypatch.delenv("TIMMY_GITEA_TOKEN", raising=False) + with patch("dashboard.routes.daily_run.CONFIG_PATH") as mock_path: + mock_path.exists.return_value = False + config = _load_config() + assert "token" not in config + + +# --------------------------------------------------------------------------- +# _get_token +# --------------------------------------------------------------------------- + + +def test_get_token_from_config_dict(): + config = {"token": "direct-token", "token_file": "~/.hermes/gitea_token"} + assert _get_token(config) == "direct-token" + + +def test_get_token_from_file(tmp_path): + token_file = tmp_path / "token.txt" + token_file.write_text(" file-token \n") + config = {"token_file": str(token_file)} + assert _get_token(config) == "file-token" + + +def test_get_token_returns_none_when_file_missing(tmp_path): + config = {"token_file": str(tmp_path / "nonexistent_token")} + assert _get_token(config) is None + + +# --------------------------------------------------------------------------- +# GiteaClient +# 
# ---------------------------------------------------------------------------


def _make_client(**kwargs) -> GiteaClient:
    """Build a GiteaClient over DEFAULT_CONFIG (plus overrides) with a fixed test token."""
    return GiteaClient({**DEFAULT_CONFIG, **kwargs}, token="test-token")


def _fake_response(body: bytes = b"", status: int = 200) -> MagicMock:
    """A context-manager-style urlopen response mock with the given body/status."""
    resp = MagicMock()
    resp.status = status
    resp.read.return_value = body
    resp.__enter__ = lambda s: resp
    resp.__exit__ = MagicMock(return_value=False)
    return resp


def test_gitea_client_headers_include_auth():
    headers = _make_client()._headers()
    assert headers["Authorization"] == "token test-token"
    assert headers["Accept"] == "application/json"


def test_gitea_client_headers_no_token():
    # With no token configured, no Authorization header is emitted.
    client = GiteaClient({**DEFAULT_CONFIG}, token=None)
    assert "Authorization" not in client._headers()


def test_gitea_client_api_url():
    expected = f"{DEFAULT_CONFIG['gitea_api']}/repos/{DEFAULT_CONFIG['repo_slug']}/issues"
    assert _make_client()._api_url("issues") == expected


def test_gitea_client_api_url_strips_trailing_slash():
    client = GiteaClient({**DEFAULT_CONFIG, "gitea_api": "http://localhost:3000/api/v1/"}, token=None)
    url = client._api_url("issues")
    # No double slash may survive past the scheme separator.
    assert "//" not in url.replace("http://", "")


def test_gitea_client_is_available_true():
    client = _make_client()
    with patch("dashboard.routes.daily_run.urlopen", return_value=_fake_response()):
        assert client.is_available() is True


def test_gitea_client_is_available_cached():
    # A cached availability result must short-circuit the network probe.
    client = _make_client()
    client._available = True
    with patch("dashboard.routes.daily_run.urlopen") as urlopen_mock:
        assert client.is_available() is True
    urlopen_mock.assert_not_called()


def test_gitea_client_is_available_false_on_url_error():
    with patch("dashboard.routes.daily_run.urlopen", side_effect=URLError("refused")):
        assert _make_client().is_available() is False


def test_gitea_client_is_available_false_on_timeout():
    with patch("dashboard.routes.daily_run.urlopen", side_effect=TimeoutError()):
        assert _make_client().is_available() is False


def test_gitea_client_get_paginated_single_page():
    payload = json.dumps([{"id": 1}, {"id": 2}]).encode()
    with patch("dashboard.routes.daily_run.urlopen", return_value=_fake_response(payload)):
        result = _make_client().get_paginated("issues")
    assert len(result) == 2
    assert result[0]["id"] == 1


def test_gitea_client_get_paginated_empty():
    with patch("dashboard.routes.daily_run.urlopen", return_value=_fake_response(b"[]")):
        assert _make_client().get_paginated("issues") == []


# ---------------------------------------------------------------------------
# LayerMetrics.trend
# ---------------------------------------------------------------------------


def test_layer_metrics_trend_no_previous_no_current():
    metrics = LayerMetrics(name="triage", label="layer:triage", current_count=0, previous_count=0)
    assert metrics.trend == "→"


def test_layer_metrics_trend_no_previous_with_current():
    metrics = LayerMetrics(name="triage", label="layer:triage", current_count=5, previous_count=0)
    assert metrics.trend == "↑"


def test_layer_metrics_trend_big_increase():
    metrics = LayerMetrics(name="triage", label="layer:triage", current_count=130, previous_count=100)
    assert metrics.trend == "↑↑"


def test_layer_metrics_trend_small_increase():
    metrics = LayerMetrics(name="triage", label="layer:triage", current_count=108, previous_count=100)
    assert metrics.trend == "↑"


def test_layer_metrics_trend_stable():
    metrics = LayerMetrics(name="triage", label="layer:triage", current_count=100, previous_count=100)
    assert metrics.trend == "→"

def test_layer_metrics_trend_small_decrease():
    metrics = LayerMetrics(name="triage", label="layer:triage", current_count=92, previous_count=100)
    assert metrics.trend == "↓"


def test_layer_metrics_trend_big_decrease():
    metrics = LayerMetrics(name="triage", label="layer:triage", current_count=70, previous_count=100)
    assert metrics.trend == "↓↓"


def test_layer_metrics_trend_color_up():
    metrics = LayerMetrics(name="triage", label="layer:triage", current_count=200, previous_count=100)
    assert metrics.trend_color == "var(--green)"


def test_layer_metrics_trend_color_down():
    metrics = LayerMetrics(name="triage", label="layer:triage", current_count=50, previous_count=100)
    assert metrics.trend_color == "var(--amber)"


def test_layer_metrics_trend_color_stable():
    metrics = LayerMetrics(name="triage", label="layer:triage", current_count=100, previous_count=100)
    assert metrics.trend_color == "var(--text-dim)"


# ---------------------------------------------------------------------------
# DailyRunMetrics.sessions_trend
# ---------------------------------------------------------------------------


def _make_daily_metrics(**kwargs) -> DailyRunMetrics:
    """DailyRunMetrics with sane defaults; keyword arguments override fields."""
    base = {
        "sessions_completed": 10,
        "sessions_previous": 8,
        "layers": [],
        "total_touched_current": 20,
        "total_touched_previous": 15,
        "lookback_days": 7,
        "generated_at": datetime.now(UTC).isoformat(),
    }
    base.update(kwargs)
    return DailyRunMetrics(**base)


def test_daily_metrics_sessions_trend_big_increase():
    assert _make_daily_metrics(sessions_completed=130, sessions_previous=100).sessions_trend == "↑↑"


def test_daily_metrics_sessions_trend_stable():
    assert _make_daily_metrics(sessions_completed=100, sessions_previous=100).sessions_trend == "→"


def test_daily_metrics_sessions_trend_no_previous_zero_completed():
    assert _make_daily_metrics(sessions_completed=0, sessions_previous=0).sessions_trend == "→"


def test_daily_metrics_sessions_trend_no_previous_with_completed():
    assert _make_daily_metrics(sessions_completed=5, sessions_previous=0).sessions_trend == "↑"


def test_daily_metrics_sessions_trend_color_green():
    assert _make_daily_metrics(sessions_completed=200, sessions_previous=100).sessions_trend_color == "var(--green)"


def test_daily_metrics_sessions_trend_color_amber():
    assert _make_daily_metrics(sessions_completed=50, sessions_previous=100).sessions_trend_color == "var(--amber)"


# ---------------------------------------------------------------------------
# _extract_layer
# ---------------------------------------------------------------------------


def test_extract_layer_finds_layer_label():
    labels = [{"name": "bug"}, {"name": "layer:triage"}, {"name": "urgent"}]
    assert _extract_layer(labels) == "triage"


def test_extract_layer_returns_none_when_no_layer():
    assert _extract_layer([{"name": "bug"}, {"name": "feature"}]) is None


def test_extract_layer_empty_labels():
    assert _extract_layer([]) is None


def test_extract_layer_first_match_wins():
    assert _extract_layer([{"name": "layer:micro-fix"}, {"name": "layer:tests"}]) == "micro-fix"


# ---------------------------------------------------------------------------
# _load_cycle_data
# ---------------------------------------------------------------------------


def test_load_cycle_data_missing_file(tmp_path):
    with patch("dashboard.routes.daily_run.REPO_ROOT", tmp_path):
        assert _load_cycle_data(days=14) == {"current": 0, "previous": 0}


def test_load_cycle_data_counts_successful_sessions(tmp_path):
    """Only successful sessions count; each falls in the current or previous window."""
    retro_dir = tmp_path / ".loop" / "retro"
    retro_dir.mkdir(parents=True)
    now = datetime.now(UTC)
    entries = [
        {"timestamp": (now - timedelta(days=3)).isoformat(), "success": True},
        {"timestamp": (now - timedelta(days=3)).isoformat(), "success": False},  # failed: not counted
        {"timestamp": (now - timedelta(days=10)).isoformat(), "success": True},  # previous window
        {"timestamp": (now - timedelta(days=20)).isoformat(), "success": True},  # outside both windows
    ]
    (retro_dir / "cycles.jsonl").write_text("\n".join(json.dumps(e) for e in entries))

    with patch("dashboard.routes.daily_run.REPO_ROOT", tmp_path):
        result = _load_cycle_data(days=7)

    assert result["current"] == 1
    assert result["previous"] == 1
def test_load_cycle_data_skips_invalid_json_lines(tmp_path):
    """A malformed line is skipped; valid lines after it are still counted."""
    retro_dir = tmp_path / ".loop" / "retro"
    retro_dir.mkdir(parents=True)
    recent_ts = (datetime.now(UTC) - timedelta(days=1)).isoformat()
    (retro_dir / "cycles.jsonl").write_text(
        f'not valid json\n{json.dumps({"timestamp": recent_ts, "success": True})}\n'
    )

    with patch("dashboard.routes.daily_run.REPO_ROOT", tmp_path):
        result = _load_cycle_data(days=7)

    assert result["current"] == 1


def test_load_cycle_data_skips_entries_with_no_timestamp(tmp_path):
    retro_dir = tmp_path / ".loop" / "retro"
    retro_dir.mkdir(parents=True)
    (retro_dir / "cycles.jsonl").write_text(json.dumps({"success": True}))

    with patch("dashboard.routes.daily_run.REPO_ROOT", tmp_path):
        result = _load_cycle_data(days=7)

    assert result == {"current": 0, "previous": 0}


# ---------------------------------------------------------------------------
# _fetch_layer_metrics
# ---------------------------------------------------------------------------


def _make_issue(updated_offset_days: int) -> dict:
    """A minimal Gitea issue dict updated the given number of days ago, labelled layer:triage."""
    return {
        "updated_at": (datetime.now(UTC) - timedelta(days=updated_offset_days)).isoformat(),
        "labels": [{"name": "layer:triage"}],
    }


def test_fetch_layer_metrics_counts_current_and_previous():
    client = _make_client()
    client._available = True
    issues = [_make_issue(updated_offset_days=3), _make_issue(updated_offset_days=10)]

    with patch.object(client, "get_paginated", return_value=issues):
        layers, total_current, total_previous = _fetch_layer_metrics(client, lookback_days=7)

    # One LayerMetrics entry per known layer label.
    assert len(layers) == len(LAYER_LABELS)
    triage = next(lm for lm in layers if lm.name == "triage")
    assert triage.current_count == 1
    assert triage.previous_count == 1


def test_fetch_layer_metrics_degrades_on_http_error():
    """Network failure degrades to all-zero metrics instead of raising."""
    client = _make_client()
    client._available = True

    with patch.object(client, "get_paginated", side_effect=URLError("network")):
        layers, total_current, total_previous = _fetch_layer_metrics(client, lookback_days=7)

    assert len(layers) == len(LAYER_LABELS)
    for layer in layers:
        assert layer.current_count == 0
        assert layer.previous_count == 0
    assert total_current == 0
    assert total_previous == 0


# ---------------------------------------------------------------------------
# _get_metrics
# ---------------------------------------------------------------------------


def test_get_metrics_returns_none_when_gitea_unavailable():
    with (
        patch("dashboard.routes.daily_run._load_config", return_value=DEFAULT_CONFIG),
        patch("dashboard.routes.daily_run._get_token", return_value=None),
        patch.object(GiteaClient, "is_available", return_value=False),
    ):
        assert _get_metrics() is None


def test_get_metrics_returns_daily_run_metrics():
    mock_layers = [LayerMetrics(name="triage", label="layer:triage", current_count=5, previous_count=3)]
    with (
        patch("dashboard.routes.daily_run._load_config", return_value=DEFAULT_CONFIG),
        patch("dashboard.routes.daily_run._get_token", return_value="tok"),
        patch.object(GiteaClient, "is_available", return_value=True),
        patch("dashboard.routes.daily_run._fetch_layer_metrics", return_value=(mock_layers, 5, 3)),
        patch("dashboard.routes.daily_run._load_cycle_data", return_value={"current": 10, "previous": 8}),
    ):
        result = _get_metrics(lookback_days=7)

    assert result is not None
    assert result.sessions_completed == 10
    assert result.sessions_previous == 8
    assert result.lookback_days == 7
    assert result.layers == mock_layers
_get_metrics(lookback_days=7) + + assert result is not None + assert result.sessions_completed == 10 + assert result.sessions_previous == 8 + assert result.lookback_days == 7 + assert result.layers == mock_layers + + +def test_get_metrics_returns_none_on_exception(): + with patch("dashboard.routes.daily_run._load_config", return_value=DEFAULT_CONFIG): + with patch("dashboard.routes.daily_run._get_token", return_value="tok"): + with patch.object(GiteaClient, "is_available", return_value=True): + with patch( + "dashboard.routes.daily_run._fetch_layer_metrics", + side_effect=Exception("unexpected"), + ): + result = _get_metrics() + assert result is None + + +# --------------------------------------------------------------------------- +# Route handlers (FastAPI) +# --------------------------------------------------------------------------- + + +def test_daily_run_metrics_api_unavailable(client): + with patch("dashboard.routes.daily_run._get_metrics", return_value=None): + resp = client.get("/daily-run/metrics") + assert resp.status_code == 503 + data = resp.json() + assert data["status"] == "unavailable" + + +def test_daily_run_metrics_api_returns_json(client): + mock_metrics = _make_daily_metrics( + layers=[ + LayerMetrics(name="triage", label="layer:triage", current_count=3, previous_count=2) + ] + ) + with patch("dashboard.routes.daily_run._get_metrics", return_value=mock_metrics): + with patch( + "dashboard.routes.quests.check_daily_run_quests", + return_value=[], + create=True, + ): + resp = client.get("/daily-run/metrics?lookback_days=7") + assert resp.status_code == 200 + data = resp.json() + assert data["status"] == "ok" + assert data["lookback_days"] == 7 + assert "sessions" in data + assert "layers" in data + assert "totals" in data + assert len(data["layers"]) == 1 + assert data["layers"][0]["name"] == "triage" + + +def test_daily_run_panel_returns_html(client): + mock_metrics = _make_daily_metrics() + with patch("dashboard.routes.daily_run._get_metrics", 
return_value=mock_metrics): + with patch("dashboard.routes.daily_run._load_config", return_value=DEFAULT_CONFIG): + resp = client.get("/daily-run/panel") + assert resp.status_code == 200 + assert "text/html" in resp.headers["content-type"] + + +def test_daily_run_panel_when_unavailable(client): + with patch("dashboard.routes.daily_run._get_metrics", return_value=None): + with patch("dashboard.routes.daily_run._load_config", return_value=DEFAULT_CONFIG): + resp = client.get("/daily-run/panel") + assert resp.status_code == 200 -- 2.43.0