Compare commits


1 Commit

Author SHA1 Message Date
STEP35 Claude Code
44607f8484 feat: add dependency freshness checker — issue #161
Some checks failed
Test / pytest (pull_request) Failing after 8s
Implements scripts/dependency_freshness.py which compares installed
dependencies against latest PyPI versions and flags packages that are
more than 2 major versions behind. Includes comprehensive tests in
scripts/test_dependency_freshness.py.

Closes #161
2026-04-26 09:58:30 -04:00
4 changed files with 450 additions and 324 deletions

View File: scripts/dependency_freshness.py (added)

@@ -0,0 +1,271 @@
#!/usr/bin/env python3
"""dependency_freshness.py - Compare installed dependencies against latest PyPI versions.
Identify packages that are more than 2 major versions behind.
Outputs a human-readable report by default or JSON with --json flag.
"""
import argparse
import json
import subprocess
import sys
from packaging import version
from typing import Dict, List, Tuple
def parse_requirements(requirements_path: str) -> List[str]:
"""Parse package names from a requirements.txt file."""
packages = []
try:
with open(requirements_path, 'r') as f:
for line in f:
line = line.strip()
if not line or line.startswith('#'):
continue
pkg_name = line
for delim in ['[', '>', '<', '=', '!', ';', '@']:
if delim in pkg_name:
pkg_name = pkg_name.split(delim)[0]
pkg_name = pkg_name.strip()
if pkg_name:
packages.append(pkg_name.lower())
except FileNotFoundError:
print(f"Warning: requirements file not found: {requirements_path}", file=sys.stderr)
return packages
def get_installed_packages() -> Dict[str, str]:
"""Get all installed packages via pip list --format=json."""
try:
result = subprocess.run(
[sys.executable, '-m', 'pip', 'list', '--format=json'],
capture_output=True, text=True, check=True
)
packages = json.loads(result.stdout)
return {pkg['name'].lower(): pkg['version'] for pkg in packages}
except subprocess.CalledProcessError as e:
print(f"Error running pip list: {e}", file=sys.stderr)
sys.exit(1)
except json.JSONDecodeError as e:
print(f"Error parsing pip output: {e}", file=sys.stderr)
sys.exit(1)
def get_outdated_packages() -> Dict[str, dict]:
"""Get outdated packages via pip list --outdated --format=json."""
try:
result = subprocess.run(
[sys.executable, '-m', 'pip', 'list', '--outdated', '--format=json'],
capture_output=True, text=True, check=True
)
outdated_list = json.loads(result.stdout)
outdated = {}
for pkg in outdated_list:
name = pkg['name'].lower()
outdated[name] = {
'installed': pkg.get('version', ''),
'latest': pkg.get('latest_version', ''),
'latest_filetype': pkg.get('latest_filetype', '')
}
return outdated
except subprocess.CalledProcessError as e:
print(f"Error running pip list --outdated: {e}", file=sys.stderr)
sys.exit(1)
except json.JSONDecodeError as e:
print(f"Error parsing pip outdated output: {e}", file=sys.stderr)
sys.exit(1)
def get_major_version(v: str) -> int:
"""Extract major version number from a version string."""
try:
parsed = version.parse(v)
if hasattr(parsed, 'major'):
return int(parsed.major)
except Exception:
pass
# Fall back to naive splitting for versions that packaging cannot parse
try:
return int(str(v).split('.')[0])
except (ValueError, IndexError):
pass
return 0
def is_more_than_two_majors_behind(installed_ver: str, latest_ver: str) -> bool:
"""Check if installed version is more than 2 major versions behind latest."""
try:
installed_major = get_major_version(installed_ver)
latest_major = get_major_version(latest_ver)
return (latest_major - installed_major) > 2
except Exception:
return False
def analyze_dependencies(
required_packages: List[str],
installed_packages: Dict[str, str],
outdated_packages: Dict[str, dict]
) -> Tuple[List[dict], List[str], List[dict]]:
"""Analyze dependency freshness."""
very_outdated = []
missing = []
outdated_but_not_critical = []
for pkg in required_packages:
if pkg not in installed_packages:
missing.append(pkg)
continue
installed_ver = installed_packages[pkg]
if pkg not in outdated_packages:
continue
latest_ver = outdated_packages[pkg]['latest']
if is_more_than_two_majors_behind(installed_ver, latest_ver):
very_outdated.append({
'package': pkg,
'installed': installed_ver,
'latest': latest_ver,
'major_diff': get_major_version(latest_ver) - get_major_version(installed_ver)
})
else:
outdated_but_not_critical.append({
'package': pkg,
'installed': installed_ver,
'latest': latest_ver,
'major_diff': get_major_version(latest_ver) - get_major_version(installed_ver)
})
return very_outdated, missing, outdated_but_not_critical
def generate_human_report(
very_outdated: List[dict],
missing: List[str],
outdated_but_not_critical: List[dict],
requirements_path: str
) -> str:
"""Generate a human-readable staleness report."""
lines = []
lines.append("=" * 60)
lines.append("DEPENDENCY FRESHNESS REPORT")
lines.append("=" * 60)
lines.append(f"Requirements file: {requirements_path}")
total = len(very_outdated) + len(missing) + len(outdated_but_not_critical)
lines.append(f"Total dependencies checked: {total}")
lines.append(f"Very outdated (>2 major versions behind): {len(very_outdated)}")
lines.append(f"Outdated but within 2 major versions: {len(outdated_but_not_critical)}")
lines.append(f"Missing (not installed): {len(missing)}")
lines.append("")
if very_outdated:
lines.append("!!! VERY OUTDATED PACKAGES (consider updating):")
lines.append("-" * 60)
for pkg_info in very_outdated:
lines.append(f" {pkg_info['package']}")
lines.append(f" Installed: {pkg_info['installed']}")
lines.append(f" Latest: {pkg_info['latest']}")
lines.append(f" Major diff: {pkg_info['major_diff']}")
lines.append("")
else:
lines.append("✓ No packages more than 2 major versions behind.")
lines.append("")
if outdated_but_not_critical:
lines.append(f"Outdated packages (within 2 major versions):")
lines.append("-" * 60)
for pkg_info in outdated_but_not_critical:
lines.append(f" {pkg_info['package']}: {pkg_info['installed']} -> {pkg_info['latest']} (major diff: {pkg_info['major_diff']})")
lines.append("")
if missing:
lines.append(f"Missing packages (not installed):")
lines.append("-" * 60)
for pkg in missing:
lines.append(f" {pkg}")
lines.append("")
lines.append("=" * 60)
lines.append("For full details, run: python3 -m pip list --outdated")
lines.append("=" * 60)
return "\n".join(lines)
def generate_json_report(
very_outdated: List[dict],
missing: List[str],
outdated_but_not_critical: List[dict],
requirements_path: str
) -> str:
"""Generate a JSON staleness report."""
report = {
'requirements_file': requirements_path,
'summary': {
'total_dependencies': len(very_outdated) + len(missing) + len(outdated_but_not_critical),
'very_outdated_count': len(very_outdated),
'outdated_within_threshold_count': len(outdated_but_not_critical),
'missing_count': len(missing)
},
'very_outdated': very_outdated,
'outdated_within_threshold': outdated_but_not_critical,
'missing': missing
}
return json.dumps(report, indent=2)
def main():
parser = argparse.ArgumentParser(
description='Check dependency freshness against PyPI latest versions.'
)
parser.add_argument(
'--requirements', '-r',
default='requirements.txt',
help='Path to requirements.txt file (default: requirements.txt)'
)
parser.add_argument(
'--json',
action='store_true',
help='Output report as JSON instead of human-readable text'
)
parser.add_argument(
'--output', '-o',
help='Optional output file for the report (default: stdout)'
)
args = parser.parse_args()
# Parse requirements
required_packages = parse_requirements(args.requirements)
if not required_packages:
print("No packages found in requirements file.", file=sys.stderr)
sys.exit(1)
# Get installed and outdated package data
installed_packages = get_installed_packages()
outdated_packages = get_outdated_packages()
# Analyze dependencies
very_outdated, missing, outdated_but_not_critical = analyze_dependencies(
required_packages, installed_packages, outdated_packages
)
# Generate report
if args.json:
report = generate_json_report(very_outdated, missing, outdated_but_not_critical, args.requirements)
else:
report = generate_human_report(very_outdated, missing, outdated_but_not_critical, args.requirements)
# Output report
if args.output:
with open(args.output, 'w') as f:
f.write(report + '\n')
else:
print(report)
# Exit code: 0 if no very outdated deps, 1 otherwise
exit_code = 1 if very_outdated else 0
sys.exit(exit_code)
if __name__ == '__main__':
main()

View File: scripts/import_graph.py (deleted)

@@ -1,271 +0,0 @@
#!/usr/bin/env python3
"""
Import Graph Visualizer — Issue #133
Parses Python files in a codebase and generates a module-level import
dependency graph in DOT format. Detects circular imports.
Usage:
python3 scripts/import_graph.py /path/to/hermes-agent
python3 scripts/import_graph.py /path/to/hermes-agent --output deps.dot
python3 scripts/import_graph.py /path/to/hermes-agent --render-png
"""
import argparse
import ast
import sys
from pathlib import Path
from collections import defaultdict
from typing import Dict, Iterator, List, Optional, Set
def python_files(root: Path) -> Iterator[Path]:
"""Yield all .py files under root, excluding common noise dirs."""
exclude_dirs = {'.git', '__pycache__', '.venv', 'venv', 'node_modules', 'dist', 'build', '.tox'}
for path in root.rglob('*.py'):
if any(part in exclude_dirs for part in path.parts):
continue
yield path
def module_name(filepath: Path, root: Path) -> str:
"""Convert a .py file path to its dotted module name relative to root."""
rel = filepath.relative_to(root)
parts = list(rel.parts)
if parts[-1] == '__init__.py':
parts = parts[:-1] # package __init__ → the package itself
elif parts[-1].endswith('.py'):
parts[-1] = parts[-1][:-3] # strip .py
# Remove any __pycache__ segments
parts = [p for p in parts if p != '__pycache__']
return '.'.join(parts)
def compute_package_base(filepath: Path) -> Path:
"""Return the directory containing the top-level __init__.py for this file's package.
For a file at a/b/c/d.py, return a/b/c if c is a package, else a/b, else a."""
parent = filepath.parent
while parent != parent.parent: # while we can go up
if (parent / '__init__.py').exists():
parent = parent.parent
else:
break
return parent
def resolve_import(from_node: ast.ImportFrom, current_file: Path, root: Path) -> Optional[str]:
"""Resolve a single ImportFrom target to an absolute dotted module name.
Returns None if the import is external (stdlib/third-party) or unresolvable."""
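# Illustrative case (assumed layout): in <root>/pkg/sub/mod.py,
# `from ..util import helper` (level=2) should resolve to 'pkg.util'.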
level = from_node.level # 0 = absolute, >0 = relative
imported = from_node.module # may be None for `from . import X`
# External (stdlib/third-party) imports are dropped: an absolute import is kept
# only if it plausibly resolves to something inside the scanned root
if level == 0 and imported:
candidate = root / imported.replace('.', '/')
if candidate.is_dir() or candidate.with_suffix('.py').exists() or (candidate / '__init__.py').exists():
return imported
# Could still be a submodule of the package being scanned itself,
# e.g. `from hermes.tools import foo` while scanning hermes/
if imported.split('.')[0] == root.name:
return imported
return None
# Relative import
# Compute the package base of the current file
package_base = compute_package_base(current_file)
rel_to_base = current_file.parent.relative_to(package_base) if package_base != current_file.parent else Path()
if level == 1: # from . import X or from .X import Y
target_package = current_file.parent
else: # level >= 2: from ..X import Y etc.
up = level - 1
target_package = current_file.parent
for _ in range(up):
if target_package != target_package.parent:
target_package = target_package.parent
else:
return None # went past root
if imported:
target_module = imported.replace('.', '/')
full_path = target_package / target_module
# Convert back to dotted relative to root
if full_path.exists() or (full_path.with_suffix('.py')).exists() or (full_path / '__init__.py').exists():
try:
rel = full_path.relative_to(root)
parts = list(rel.parts)
if (full_path / '__init__.py').exists():
pass # keep all parts
elif full_path.is_file() and full_path.name.endswith('.py'):
parts[-1] = parts[-1][:-3]
return '.'.join(parts)
except ValueError:
pass
return None
else:
# from . import X — target_package is the package itself
try:
rel = target_package.relative_to(root)
return '.'.join(rel.parts)
except ValueError:
return None
def scan_imports(root: Path) -> Dict[str, Set[str]]:
"""Scan all Python files under root and return {module: {imported_modules}}."""
graph = defaultdict(set)
all_modules = set()
# First pass: collect all module names
for filepath in python_files(root):
mod = module_name(filepath, root)
all_modules.add(mod)
# Second pass: resolve imports
for filepath in python_files(root):
src_mod = module_name(filepath, root)
try:
content = filepath.read_text(errors='ignore')
tree = ast.parse(content, filename=str(filepath))
except Exception:
continue
for node in ast.walk(tree):
if isinstance(node, ast.Import):
for alias in node.names:
name = alias.name.split('.')[0] # top-level package only
# If name matches a local module, add edge
if any(m.startswith(name) for m in all_modules):
graph[src_mod].add(name)
elif isinstance(node, ast.ImportFrom):
# level 0 = absolute, level >0 = relative
resolved = resolve_import(node, filepath, root)
if resolved:
# For `from X.Y import Z`, the dependency is on X.Y
graph[src_mod].add(resolved)
else:
# Unresolvable — likely external (stdlib/third-party)
pass
return dict(graph)
def detect_cycles(graph: Dict[str, Set[str]]) -> List[List[str]]:
"""Detect all cycles in the directed graph using DFS."""
cycles = []
visited = set()
rec_stack = set()
path = []
def dfs(node: str):
visited.add(node)
rec_stack.add(node)
path.append(node)
for neighbor in sorted(graph.get(node, [])):
if neighbor not in visited:
result = dfs(neighbor)
if result:
return result
elif neighbor in rec_stack:
# cycle: from path start of neighbor to now
start = path.index(neighbor)
return path[start:] + [neighbor]
path.pop()
rec_stack.remove(node)
return None
for node in sorted(graph):
if node not in visited:
# dfs() can return early with a cycle, leaving rec_stack/path populated;
# reset them so stale entries cannot cause false positives for the next root
rec_stack.clear()
path.clear()
cycle = dfs(node)
if cycle:
cycles.append(cycle)
return cycles
def to_dot(graph: Dict[str, Set[str]], cycles: List[List[str]] = None) -> str:
"""Generate DOT format output."""
cycle_nodes = set()
if cycles:
for cycle in cycles:
cycle_nodes.update(cycle)
lines = ['digraph import_graph {']
lines.append(' rankdir=LR;')
lines.append(' node [shape=box, style=filled, fontname="Helvetica"];')
lines.append(' edge [arrowhead=vee];')
lines.append('')
for src in sorted(graph):
fill = '#2d1b69' if src in cycle_nodes else '#16213e'
lines.append(f' "{src}" [fillcolor="{fill}"];')
for src, deps in sorted(graph.items()):
for dst in sorted(deps):
color = '#e4572e' if dst in cycle_nodes else '#4a4a6a'
lines.append(f' "{src}" -> "{dst}" [color="{color}"];')
lines.append('}')
return '\n'.join(lines)
def main():
parser = argparse.ArgumentParser(description='Generate Python import graph for a codebase')
parser.add_argument('path', help='Path to Python project (e.g. hermes-agent directory)')
parser.add_argument('--output', '-o', help='Write DOT to file instead of stdout')
parser.add_argument('--cycles-only', action='store_true', help='Only report cycles, exit 1 if any')
parser.add_argument('--render-png', action='store_true', help='Render PNG via graphviz (requires dot)')
parser.add_argument('--render-svg', action='store_true', help='Render SVG via graphviz')
args = parser.parse_args()
root = Path(args.path).resolve()
if not root.is_dir():
print(f"Error: {root} is not a directory", file=sys.stderr)
sys.exit(1)
print(f"Scanning {root}...", file=sys.stderr)
graph = scan_imports(root)
cycles = detect_cycles(graph)
if args.cycles_only:
if cycles:
print("CIRCULAR DEPENDENCIES:", file=sys.stderr)
for cycle in cycles:
print(f" {''.join(cycle)}", file=sys.stderr)
sys.exit(1)
else:
print("No circular dependencies found.", file=sys.stderr)
sys.exit(0)
# Prepare output
output = to_dot(graph, cycles)
if args.output:
Path(args.output).write_text(output)
print(f"DOT written to {args.output}", file=sys.stderr)
# Optional rendering
if args.render_png or args.render_svg:
import subprocess
out_path = Path(args.output)
if args.render_png:
png_out = out_path.with_suffix('.png')
subprocess.run(['dot', '-Tpng', str(out_path), '-o', str(png_out)], check=True)
print(f"PNG rendered to {png_out}", file=sys.stderr)
if args.render_svg:
svg_out = out_path.with_suffix('.svg')
subprocess.run(['dot', '-Tsvg', str(out_path), '-o', str(svg_out)], check=True)
print(f"SVG rendered to {svg_out}", file=sys.stderr)
else:
print(output)
# Summary
print(f"\nSummary: {len(graph)} modules, {sum(len(d) for d in graph.values())} import edges, {len(cycles)} cycles",
file=sys.stderr)
if __name__ == '__main__':
main()

View File: scripts/test_dependency_freshness.py (added)

@@ -0,0 +1,179 @@
#!/usr/bin/env python3
"""Tests for scripts/dependency_freshness.py — 9.7 Dependency Freshness."""
import json
import os
import sys
from unittest.mock import patch, MagicMock
# Import target module
sys.path.insert(0, os.path.dirname(__file__) or ".")
import importlib.util
spec = importlib.util.spec_from_file_location(
"dependency_freshness",
os.path.join(os.path.dirname(__file__) or ".", "dependency_freshness.py")
)
mod = importlib.util.module_from_spec(spec)
spec.loader.exec_module(mod)
parse_requirements = mod.parse_requirements
get_major_version = mod.get_major_version
is_more_than_two_majors_behind = mod.is_more_than_two_majors_behind
analyze_dependencies = mod.analyze_dependencies
def test_parse_requirements_simple():
"""Parse a simple package line."""
import tempfile
with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
f.write("requests\n")
tmp = f.name
try:
pkgs = parse_requirements(tmp)
assert pkgs == ["requests"], f"got {pkgs}"
print("PASS: test_parse_requirements_simple")
finally:
os.unlink(tmp)
def test_parse_requirements_with_specifiers():
"""Parse lines with version specifiers."""
import tempfile
with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
f.write("pytest>=8,<9\n")
f.write("aiohttp>=3.8\n")
tmp = f.name
try:
pkgs = parse_requirements(tmp)
assert pkgs == ["pytest", "aiohttp"], f"got {pkgs}"
print("PASS: test_parse_requirements_with_specifiers")
finally:
os.unlink(tmp)
def test_parse_requirements_ignores_comments_and_blanks():
"""Comments and blank lines are skipped."""
import tempfile
with tempfile.NamedTemporaryFile(mode='w', suffix='.txt', delete=False) as f:
f.write("# This is a comment\n")
f.write("\n")
f.write(" \n")
f.write("numpy\n")
f.write("# another comment\n")
tmp = f.name
try:
pkgs = parse_requirements(tmp)
assert pkgs == ["numpy"], f"got {pkgs}"
print("PASS: test_parse_requirements_ignores_comments_and_blanks")
finally:
os.unlink(tmp)
def test_get_major_version_normal():
"""Extract major version from typical semantic strings."""
assert get_major_version("1.2.3") == 1
assert get_major_version("3.4.5") == 3
assert get_major_version("0.11.0") == 0
print("PASS: test_get_major_version_normal")
def test_get_major_version_with_rc():
"""Prerelease versions still yield major number."""
assert get_major_version("2.0.0rc1") == 2
assert get_major_version("1.0.0a1") == 1
print("PASS: test_get_major_version_with_rc")
def test_is_more_than_two_majors_behind():
"""Difference >2 triggers True; <=2 triggers False."""
assert is_more_than_two_majors_behind("1.2.3", "4.0.0") is True
assert is_more_than_two_majors_behind("3.9.0", "4.0.0") is False
assert is_more_than_two_majors_behind("2.1.0", "5.2.0") is True
assert is_more_than_two_majors_behind("8.0.0", "9.0.0") is False
assert is_more_than_two_majors_behind("4.0.0", "4.0.0") is False
print("PASS: test_is_more_than_two_majors_behind")
def test_analyze_dependencies_very_outdated():
"""Flag packages more than 2 major versions behind."""
required = ["pkg_a", "pkg_b"]
installed = {"pkg_a": "1.0.0", "pkg_b": "3.5.2"}
outdated = {
"pkg_a": {"installed": "1.0.0", "latest": "4.0.0"},
"pkg_b": {"installed": "3.5.2", "latest": "4.0.0"},
}
very_out, missing, outdated_ok = analyze_dependencies(required, installed, outdated)
assert len(very_out) == 1 and very_out[0]["package"] == "pkg_a"
assert len(missing) == 0
assert len(outdated_ok) == 1 and outdated_ok[0]["package"] == "pkg_b"
print("PASS: test_analyze_dependencies_very_outdated")
def test_analyze_dependencies_missing():
"""Detect packages not installed at all."""
required = ["pkg_a", "pkg_missing"]
installed = {"pkg_a": "2.0.0"}
outdated = {"pkg_a": {"installed": "2.0.0", "latest": "3.0.0"}}
very_out, missing, outdated_ok = analyze_dependencies(required, installed, outdated)
assert "pkg_missing" in missing
assert len(very_out) == 0
assert len(outdated_ok) == 1
print("PASS: test_analyze_dependencies_missing")
def test_analyze_dependencies_up_to_date():
"""Packages up-to-date are not flagged."""
required = ["pkg_good"]
installed = {"pkg_good": "5.0.0"}
outdated = {}
very_out, missing, outdated_ok = analyze_dependencies(required, installed, outdated)
assert len(very_out) == 0
assert len(missing) == 0
assert len(outdated_ok) == 0
print("PASS: test_analyze_dependencies_up_to_date")
def test_generate_human_report_contains_very_outdated():
"""Human report includes very outdated packages."""
very_out = [
{"package": "oldpkg", "installed": "1.0", "latest": "4.0", "major_diff": 3}
]
missing = []
outdated_ok = []
report = mod.generate_human_report(very_out, missing, outdated_ok, "requirements.txt")
assert "oldpkg" in report
assert "Installed: 1.0" in report
assert "Latest: 4.0" in report
assert "Major diff: 3" in report
print("PASS: test_generate_human_report_contains_very_outdated")
def test_generate_json_report_structure():
"""JSON report contains required keys."""
very_out = [{"package": "oldpkg", "installed": "1.0", "latest": "4.0", "major_diff": 3}]
missing = ["missing_pkg"]
outdated_ok = []
report_json = mod.generate_json_report(very_out, missing, outdated_ok, "requirements.txt")
data = json.loads(report_json)
assert "summary" in data
assert data["summary"]["very_outdated_count"] == 1
assert data["summary"]["missing_count"] == 1
assert "very_outdated" in data
assert "missing" in data
print("PASS: test_generate_json_report_structure")
if __name__ == '__main__':
print("Running dependency_freshness test suite...")
test_parse_requirements_simple()
test_parse_requirements_with_specifiers()
test_parse_requirements_ignores_comments_and_blanks()
test_get_major_version_normal()
test_get_major_version_with_rc()
test_is_more_than_two_majors_behind()
test_analyze_dependencies_very_outdated()
test_analyze_dependencies_missing()
test_analyze_dependencies_up_to_date()
test_generate_human_report_contains_very_outdated()
test_generate_json_report_structure()
print("ALL TESTS PASSED.")

View File: import_graph smoke test in tests/ (deleted)

@@ -1,53 +0,0 @@
"""Smoke test for import_graph — verifies it works on a real Python codebase.
We run import_graph.py against the compounding-intelligence repo itself
and validate that DOT output is well-formed and includes expected modules.
"""
import subprocess
import sys
from pathlib import Path
REPO_ROOT = Path(__file__).resolve().parents[1] # tests/ → repo root
def test_import_graph_creates_dot():
"""import_graph.py produces valid DOT output for this repo."""
script = REPO_ROOT / 'scripts' / 'import_graph.py'
result = subprocess.run(
[sys.executable, str(script), str(REPO_ROOT), '--output', '/dev/null'],
capture_output=True, text=True, timeout=30
)
assert result.returncode == 0, f"script failed: {result.stderr}"
# Should have printed a summary
assert ' modules,' in result.stderr or 'Summary:' in result.stderr
def test_import_graph_excludes_site_packages():
"""import_graph.py does not crash on unparseable files or external deps."""
script = REPO_ROOT / 'scripts' / 'import_graph.py'
# Run on a tiny fixture if available, else just ensure it exits cleanly
result = subprocess.run(
[sys.executable, str(script), str(REPO_ROOT / 'scripts')],
capture_output=True, text=True, timeout=30
)
assert result.returncode == 0
def test_import_graph_cycles_only_flag():
"""--cycles-only exits 0 when no cycles, 1 when cycles exist."""
script = REPO_ROOT / 'scripts' / 'import_graph.py'
result = subprocess.run(
[sys.executable, str(script), str(REPO_ROOT / 'scripts'), '--cycles-only'],
capture_output=True, text=True, timeout=30
)
# scripts/ is expected to have no cycles (exit 0); exit 1 is also tolerated so this
# smoke test will not hard-fail if a cycle is ever introduced
assert result.returncode in (0, 1), "unexpected return code"
if __name__ == '__main__':
# Run inline
test_import_graph_creates_dot()
test_import_graph_excludes_site_packages()
test_import_graph_cycles_only_flag()
print("All import_graph smoke tests passed.")