Compare commits
1 commit
fix/190...feat/93-de
| Author | SHA1 | Date |
|---|---|---|
|  | cbebd93cbb |  |
.gitea/workflows/test.yml
@@ -1,22 +0,0 @@
name: Test

on:
  pull_request:
  push:
    branches: [main]

jobs:
  pytest:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
      - uses: actions/setup-python@v5
        with:
          python-version: '3.11'
      - name: Install test dependencies
        run: |
          python -m pip install --upgrade pip
          pip install -r requirements.txt
      - name: Run test suite
        run: |
          make test
Makefile
@@ -1,4 +0,0 @@
.PHONY: test

test:
	python3 -m pytest tests/test_ci_config.py scripts/test_*.py -v
requirements.txt
@@ -1 +0,0 @@
pytest>=8,<9
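Together, the three files above define the CI contract: the workflow installs a pinned pytest from requirements.txt and delegates to the Makefile. A local reproduction sketch, assuming a repo checkout root and Python 3.11 as in the workflow:

    python -m pip install --upgrade pip
    pip install -r requirements.txt
    make test    # expands to: python3 -m pytest tests/test_ci_config.py scripts/test_*.py -v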
scripts/dependency_graph.py (new file, 249 lines)
@@ -0,0 +1,249 @@
#!/usr/bin/env python3
"""
Cross-Repo Dependency Graph Builder

Scans repos for import/require/reference patterns and builds a directed
dependency graph. Detects circular dependencies. Outputs DOT and Mermaid.

Usage:
    python3 scripts/dependency_graph.py /path/to/repos/
    python3 scripts/dependency_graph.py --repos repo1,repo2,repo3 --format mermaid
    python3 scripts/dependency_graph.py /path/to/repos/ --format dot --output deps.dot

Patterns detected:
    - Python: import X, from X import Y
    - JavaScript: require("X"), import ... from "X"
    - Go: import "X"
    - Ansible: include_role, import_role
    - Docker/Compose: image: X, depends_on
    - Config references: repo-name in YAML/TOML/JSON
"""

import argparse
import json
import os
import re
import sys
from collections import defaultdict
from pathlib import Path


# Known repo names for matching
KNOWN_REPOS = [
    "hermes-agent", "timmy-config", "timmy-home", "the-nexus", "the-door",
    "the-beacon", "fleet-ops", "burn-fleet", "timmy-dispatch", "turboquant",
    "compounding-intelligence", "the-playground", "second-son-of-timmy",
    "ai-safety-review", "the-echo-pattern", "timmy-academy", "wolf",
    "the-testament",
]


def normalize_repo_name(name: str) -> str:
    """Normalize a repo name for comparison."""
    return name.lower().replace("_", "-").replace(".git", "").strip()


def scan_file_for_deps(filepath: str, content: str, own_repo: str) -> set:
    """Scan a file's content for references to other repos."""
    deps = set()
    own_norm = normalize_repo_name(own_repo)

    for repo in KNOWN_REPOS:
        repo_norm = normalize_repo_name(repo)
        if repo_norm == own_norm:
            continue

        # Direct name references
        patterns = [
            repo,                           # exact name
            repo.replace("-", "_"),         # underscore variant
            repo.replace("-", ""),          # no separator
            f"/{repo}/",                    # path reference
            f'"{repo}"',                    # quoted
            f"'{repo}'",                    # single quoted
            f"Timmy_Foundation/{repo}",     # full Gitea path
            f"Timmy_Foundation.{repo}",     # Python module path
        ]

        for pattern in patterns:
            if pattern in content:
                deps.add(repo)
                break

    return deps


def scan_repo(repo_path: str, repo_name: str = None) -> dict:
    """Scan a repo directory for dependencies."""
    path = Path(repo_path)
    if not path.is_dir():
        return {"error": f"Not a directory: {repo_path}"}

    if not repo_name:
        repo_name = path.name

    deps = set()
    files_scanned = 0
    exts = {".py", ".js", ".ts", ".go", ".yaml", ".yml", ".toml", ".json",
            ".md", ".sh", ".bash", ".Dockerfile", ".tf", ".hcl"}

    for fpath in path.rglob("*"):
        if not fpath.is_file():
            continue
        if fpath.suffix not in exts:
            continue
        # Skip common non-source dirs
        parts = fpath.parts
        if any(p in (".git", "node_modules", "__pycache__", ".venv", "venv",
                     "vendor", "dist", "build", ".tox") for p in parts):
            continue

        try:
            content = fpath.read_text(errors="ignore")
        except OSError:
            continue

        file_deps = scan_file_for_deps(str(fpath), content, repo_name)
        deps.update(file_deps)
        files_scanned += 1

    return {
        "repo": repo_name,
        "dependencies": sorted(deps),
        "files_scanned": files_scanned,
    }


def detect_cycles(graph: dict) -> list:
    """Detect circular dependencies using DFS."""
    cycles = []
    visited = set()
    rec_stack = set()

    def dfs(node, path):
        visited.add(node)
        rec_stack.add(node)

        for neighbor in graph.get(node, {}).get("dependencies", []):
            if neighbor not in visited:
                result = dfs(neighbor, path + [neighbor])
                if result:
                    return result
            elif neighbor in rec_stack:
                cycle_start = path.index(neighbor)
                return path[cycle_start:] + [neighbor]

        rec_stack.remove(node)
        return None

    for node in graph:
        if node not in visited:
            cycle = dfs(node, [node])
            # Early returns above skip the rec_stack.remove() cleanup, so
            # reset between roots to avoid phantom cycles across components.
            rec_stack.clear()
            if cycle:
                cycles.append(cycle)

    return cycles


def to_dot(graph: dict) -> str:
    """Generate DOT format output."""
    lines = ["digraph dependencies {"]
    lines.append("  rankdir=LR;")
    # Single-quoted so the DOT color strings keep their double quotes.
    lines.append('  node [shape=box, style=filled, fillcolor="#1a1a2e", fontcolor="#e6edf3"];')
    lines.append('  edge [color="#4a4a6a"];')
    lines.append("")

    for repo, data in sorted(graph.items()):
        dep_count = len(data.get("dependencies", []))
        fill = "#2d1b69" if dep_count > 2 else "#16213e"
        lines.append(f'  "{repo}" [fillcolor="{fill}"];')
        for dep in data.get("dependencies", []):
            lines.append(f'  "{repo}" -> "{dep}";')

    lines.append("}")
    return "\n".join(lines)


def to_mermaid(graph: dict) -> str:
    """Generate Mermaid format output."""
    lines = ["graph LR"]

    for repo, data in sorted(graph.items()):
        for dep in data.get("dependencies", []):
            lines.append(f"    {repo.replace('-', '_')} --> {dep.replace('-', '_')}")

    # Add node labels
    lines.append("")
    for repo in sorted(graph.keys()):
        lines.append(f"    {repo.replace('-', '_')}[{repo}]")

    return "\n".join(lines)


def main():
    parser = argparse.ArgumentParser(description="Build cross-repo dependency graph")
    parser.add_argument("repos_dir", nargs="?", help="Directory containing repos")
    parser.add_argument("--repos", help="Comma-separated list of repo paths")
    parser.add_argument("--format", choices=["dot", "mermaid", "json"], default="json")
    parser.add_argument("--output", "-o", help="Output file (default: stdout)")
    parser.add_argument("--cycles-only", action="store_true", help="Only report cycles")
    args = parser.parse_args()

    results = {}
    repo_paths = []

    if args.repos:
        repo_paths = [p.strip() for p in args.repos.split(",")]
    elif args.repos_dir:
        base = Path(args.repos_dir)
        repo_paths = [str(p) for p in base.iterdir() if p.is_dir() and not p.name.startswith(".")]
    else:
        parser.print_help()
        sys.exit(1)

    for rpath in repo_paths:
        name = Path(rpath).name
        print(f"Scanning {name}...", file=sys.stderr)
        result = scan_repo(rpath, name)
        if "error" not in result:
            results[name] = result

    # Detect cycles
    cycles = detect_cycles(results)

    if args.cycles_only:
        if cycles:
            print("CIRCULAR DEPENDENCIES DETECTED:")
            for cycle in cycles:
                print(f"  {' -> '.join(cycle)}")
            sys.exit(1)
        else:
            print("No circular dependencies found.")
            sys.exit(0)

    # Output
    output = ""
    if args.format == "dot":
        output = to_dot(results)
    elif args.format == "mermaid":
        output = to_mermaid(results)
    else:
        output = json.dumps({
            "repos": results,
            "cycles": cycles,
            "summary": {
                "total_repos": len(results),
                "total_deps": sum(len(r["dependencies"]) for r in results.values()),
                "cycles_found": len(cycles),
            },
        }, indent=2)

    if args.output:
        Path(args.output).write_text(output)
        print(f"Written to {args.output}", file=sys.stderr)
    else:
        print(output)


if __name__ == "__main__":
    main()
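A minimal check of the cycle detector, assuming the functions above are importable (for example from a REPL next to the script); the graph dict is fabricated, but its shape matches what scan_repo() returns:

    # Two repos that reference each other, plus one standalone repo.
    graph = {
        "the-nexus": {"repo": "the-nexus", "dependencies": ["the-door"], "files_scanned": 12},
        "the-door": {"repo": "the-door", "dependencies": ["the-nexus"], "files_scanned": 7},
        "wolf": {"repo": "wolf", "dependencies": [], "files_scanned": 3},
    }
    print(detect_cycles(graph))  # [['the-nexus', 'the-door', 'the-nexus']]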
scripts/knowledge_staleness_check.py
@@ -1,131 +0,0 @@
#!/usr/bin/env python3
"""
Knowledge Store Staleness Detector

Detect stale knowledge entries by comparing source file hashes.

Usage:
    python3 scripts/knowledge_staleness_check.py --index knowledge/index.json
    python3 scripts/knowledge_staleness_check.py --index knowledge/index.json --json
    python3 scripts/knowledge_staleness_check.py --index knowledge/index.json --fix
"""

import argparse
import hashlib
import json
import os
import sys
from datetime import datetime, timezone
from pathlib import Path
from typing import Dict, List, Any, Optional


def compute_file_hash(filepath: str) -> Optional[str]:
    """Compute SHA-256 hash of a file. Returns None if file doesn't exist."""
    try:
        with open(filepath, "rb") as f:
            return "sha256:" + hashlib.sha256(f.read()).hexdigest()
    except (FileNotFoundError, IsADirectoryError, PermissionError):
        return None


def check_staleness(index_path: str, repo_root: str = ".") -> List[Dict[str, Any]]:
    """Check all entries in knowledge index for staleness.

    Returns list of entries with staleness info:
    - status: "fresh" | "stale" | "missing_source" | "no_source" | "no_hash"
    - current_hash: computed hash (if source exists)
    - stored_hash: hash from index
    """
    with open(index_path) as f:
        data = json.load(f)

    facts = data.get("facts", [])
    results = []

    for entry in facts:
        source_file = entry.get("source_file")
        stored_hash = entry.get("source_hash")

        if not source_file:
            results.append({**entry, "status": "no_source", "current_hash": None})
            continue

        full_path = os.path.join(repo_root, source_file)
        current_hash = compute_file_hash(full_path)

        if current_hash is None:
            results.append({**entry, "status": "missing_source", "current_hash": None})
        elif not stored_hash:
            results.append({**entry, "status": "no_hash", "current_hash": current_hash})
        elif current_hash != stored_hash:
            results.append({**entry, "status": "stale", "current_hash": current_hash})
        else:
            results.append({**entry, "status": "fresh", "current_hash": current_hash})

    return results


def fix_hashes(index_path: str, repo_root: str = ".") -> int:
    """Add hashes to entries missing them. Returns count of fixed entries."""
    with open(index_path) as f:
        data = json.load(f)

    fixed = 0
    for entry in data.get("facts", []):
        if entry.get("source_hash"):
            continue
        source_file = entry.get("source_file")
        if not source_file:
            continue
        full_path = os.path.join(repo_root, source_file)
        h = compute_file_hash(full_path)
        if h:
            entry["source_hash"] = h
            fixed += 1

    with open(index_path, "w") as f:
        json.dump(data, f, indent=2)

    return fixed


def main():
    parser = argparse.ArgumentParser(description="Check knowledge store staleness")
    parser.add_argument("--index", required=True, help="Path to knowledge/index.json")
    parser.add_argument("--repo", default=".", help="Repo root for source file resolution")
    parser.add_argument("--json", action="store_true", help="Output as JSON")
    parser.add_argument("--fix", action="store_true", help="Add hashes to entries missing them")

    args = parser.parse_args()

    if args.fix:
        fixed = fix_hashes(args.index, args.repo)
        print(f"Fixed {fixed} entries with missing hashes.")
        return

    results = check_staleness(args.index, args.repo)

    if args.json:
        print(json.dumps(results, indent=2))
    else:
        stale = [r for r in results if r["status"] != "fresh"]
        fresh = [r for r in results if r["status"] == "fresh"]

        print("Knowledge Store Staleness Check")
        print(f"  Total entries: {len(results)}")
        print(f"  Fresh: {len(fresh)}")
        print(f"  Stale/Issues: {len(stale)}")
        print()

        if stale:
            print("Issues found:")
            for r in stale:
                status = r["status"]
                fact = r.get("fact", "?")[:60]
                source = r.get("source_file", "?")
                print(f"  [{status}] {source}: {fact}")
        else:
            print("All entries are fresh!")


if __name__ == "__main__":
    main()
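For reference, a minimal knowledge/index.json entry of the shape check_staleness() consumes; the field names come from the code above, the values are fabricated:

    example_index = {
        "facts": [
            {
                "fact": "CI runs the suite via `make test`.",
                "source_file": "Makefile",
                # As produced by compute_file_hash(); digest fabricated and truncated here.
                "source_hash": "sha256:9f2b...e41c",
            }
        ]
    }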
scripts/test_knowledge_staleness_check.py
@@ -1,129 +0,0 @@
#!/usr/bin/env python3
"""Tests for scripts/knowledge_staleness_check.py (8 tests)."""

import json
import os
import sys
import tempfile

sys.path.insert(0, os.path.dirname(__file__) or ".")
import importlib.util

# Load the module under test from the same directory as this file.
spec = importlib.util.spec_from_file_location(
    "ks", os.path.join(os.path.dirname(__file__) or ".", "knowledge_staleness_check.py"))
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
check_staleness = mod.check_staleness
fix_hashes = mod.fix_hashes
compute_file_hash = mod.compute_file_hash


def test_fresh_entry():
    with tempfile.TemporaryDirectory() as tmpdir:
        src = os.path.join(tmpdir, "source.py")
        with open(src, "w") as f:
            f.write("print('hello')")
        h = compute_file_hash(src)
        idx = os.path.join(tmpdir, "index.json")
        with open(idx, "w") as f:
            json.dump({"facts": [{"fact": "hello", "source_file": "source.py", "source_hash": h}]}, f)
        results = check_staleness(idx, tmpdir)
        assert results[0]["status"] == "fresh"
        print("PASS: test_fresh_entry")


def test_stale_entry():
    with tempfile.TemporaryDirectory() as tmpdir:
        src = os.path.join(tmpdir, "source.py")
        with open(src, "w") as f:
            f.write("original content")
        idx = os.path.join(tmpdir, "index.json")
        with open(idx, "w") as f:
            json.dump({"facts": [{"fact": "old", "source_file": "source.py", "source_hash": "sha256:wrong"}]}, f)
        # Now change the source
        with open(src, "w") as f:
            f.write("modified content")
        results = check_staleness(idx, tmpdir)
        assert results[0]["status"] == "stale"
        print("PASS: test_stale_entry")


def test_missing_source():
    with tempfile.TemporaryDirectory() as tmpdir:
        idx = os.path.join(tmpdir, "index.json")
        with open(idx, "w") as f:
            json.dump({"facts": [{"fact": "gone", "source_file": "nonexistent.py", "source_hash": "sha256:abc"}]}, f)
        results = check_staleness(idx, tmpdir)
        assert results[0]["status"] == "missing_source"
        print("PASS: test_missing_source")


def test_no_hash():
    with tempfile.TemporaryDirectory() as tmpdir:
        src = os.path.join(tmpdir, "source.py")
        with open(src, "w") as f:
            f.write("content")
        idx = os.path.join(tmpdir, "index.json")
        with open(idx, "w") as f:
            json.dump({"facts": [{"fact": "no hash", "source_file": "source.py"}]}, f)
        results = check_staleness(idx, tmpdir)
        assert results[0]["status"] == "no_hash"
        assert results[0]["current_hash"].startswith("sha256:")
        print("PASS: test_no_hash")


def test_no_source_field():
    with tempfile.TemporaryDirectory() as tmpdir:
        idx = os.path.join(tmpdir, "index.json")
        with open(idx, "w") as f:
            json.dump({"facts": [{"fact": "orphan"}]}, f)
        results = check_staleness(idx, tmpdir)
        assert results[0]["status"] == "no_source"
        print("PASS: test_no_source_field")


def test_fix_hashes():
    with tempfile.TemporaryDirectory() as tmpdir:
        src = os.path.join(tmpdir, "source.py")
        with open(src, "w") as f:
            f.write("content for hashing")
        idx = os.path.join(tmpdir, "index.json")
        with open(idx, "w") as f:
            json.dump({"facts": [{"fact": "needs hash", "source_file": "source.py"}]}, f)
        fixed = fix_hashes(idx, tmpdir)
        assert fixed == 1
        # Verify hash was added
        with open(idx) as f:
            data = json.load(f)
        assert data["facts"][0]["source_hash"].startswith("sha256:")
        print("PASS: test_fix_hashes")


def test_empty_index():
    with tempfile.TemporaryDirectory() as tmpdir:
        idx = os.path.join(tmpdir, "index.json")
        with open(idx, "w") as f:
            json.dump({"facts": []}, f)
        results = check_staleness(idx, tmpdir)
        assert results == []
        print("PASS: test_empty_index")


def test_compute_hash_nonexistent():
    h = compute_file_hash("/nonexistent/path/file.py")
    assert h is None
    print("PASS: test_compute_hash_nonexistent")


def run_all():
    test_fresh_entry()
    test_stale_entry()
    test_missing_source()
    test_no_hash()
    test_no_source_field()
    test_fix_hashes()
    test_empty_index()
    test_compute_hash_nonexistent()
    print("\nAll 8 tests passed!")


if __name__ == "__main__":
    run_all()
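These tests are plain asserts driven by run_all(), so they pass both as a direct script and under the Makefile's scripts/test_*.py pytest glob; an invocation sketch (the script path is the location inferred above):

    python3 scripts/test_knowledge_staleness_check.py
    python3 -m pytest scripts/test_*.py -v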
tests/test_ci_config.py
@@ -1,19 +0,0 @@
from pathlib import Path


def test_requirements_makefile_and_workflow_exist() -> None:
    assert Path("requirements.txt").exists()
    assert Path("Makefile").exists()
    assert Path(".gitea/workflows/test.yml").exists()


def test_ci_workflow_runs_project_test_command() -> None:
    workflow = Path(".gitea/workflows/test.yml").read_text(encoding="utf-8")
    requirements = Path("requirements.txt").read_text(encoding="utf-8")
    makefile = Path("Makefile").read_text(encoding="utf-8")

    assert "pytest" in requirements
    assert "test:" in makefile
    assert "python3 -m pytest tests/test_ci_config.py scripts/test_*.py -v" in makefile
    assert "pip install -r requirements.txt" in workflow
    assert "make test" in workflow