Compare commits
1 Commits
step35/96-
...
step35/133
| Author | SHA1 | Date | |
|---|---|---|---|
|
|
4998c5b6bf |
@@ -1,203 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Docstring Generator — find and add missing docstrings.
|
||||
|
||||
Scans Python files for functions/async functions lacking docstrings.
|
||||
Generates Google-style docstrings from function signature and body.
|
||||
Inserts them in place.
|
||||
|
||||
Usage:
|
||||
python3 docstring_generator.py scripts/ # Fix in place
|
||||
python3 docstring_generator.py --dry-run scripts/ # Preview changes
|
||||
python3 docstring_generator.py --json scripts/ # Machine-readable output
|
||||
python3 docstring_generator.py path/to/file.py
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import ast
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from typing import Optional, Tuple, List
|
||||
|
||||
|
||||
# --- Helper: turn snake_case into Title Case phrase ---
|
||||
def name_to_title(name: str) -> str:
|
||||
"""Convert snake_case function name to a Title Case description."""
|
||||
words = name.replace('_', ' ').split()
|
||||
if not words:
|
||||
return ''
|
||||
titled = []
|
||||
for w in words:
|
||||
if len(w) <= 2:
|
||||
titled.append(w.upper())
|
||||
else:
|
||||
titled.append(w[0].upper() + w[1:])
|
||||
return ' '.join(titled)
|
||||
|
||||
|
||||
# --- Helper: extract first meaningful statement from body for summary ---
|
||||
def extract_body_hint(body: list[ast.stmt]) -> Optional[str]:
|
||||
"""Look for an assignment or return that hints at function purpose."""
|
||||
for stmt in body:
|
||||
if isinstance(stmt, ast.Expr) and isinstance(stmt.value, ast.Constant):
|
||||
continue # skip existing docstring placeholder
|
||||
# Assignment to a result-like variable?
|
||||
if isinstance(stmt, ast.Assign):
|
||||
for target in stmt.targets:
|
||||
if isinstance(target, ast.Name):
|
||||
var_name = target.id
|
||||
if var_name in ('result', 'msg', 'output', 'retval', 'value', 'response', 'data'):
|
||||
val = ast.unparse(stmt.value).strip()
|
||||
if val:
|
||||
return f"Compute or return {val}"
|
||||
# Return statement
|
||||
if isinstance(stmt, ast.Return) and stmt.value:
|
||||
ret = ast.unparse(stmt.value).strip()
|
||||
if ret:
|
||||
return f"Return {ret}"
|
||||
break
|
||||
return None
|
||||
|
||||
|
||||
# --- Generate a docstring string for a function ---
|
||||
def generate_docstring(func_node: ast.FunctionDef | ast.AsyncFunctionDef) -> str:
|
||||
"""Build a Google-style docstring for the given function node."""
|
||||
parts: list[str] = []
|
||||
|
||||
# Summary line
|
||||
summary = name_to_title(func_node.name)
|
||||
body_hint = extract_body_hint(func_node.body)
|
||||
if body_hint:
|
||||
summary = f"{summary}. {body_hint}"
|
||||
parts.append(summary)
|
||||
|
||||
# Args section if there are parameters (excluding self/cls)
|
||||
args = func_node.args.args
|
||||
if args:
|
||||
arg_lines = []
|
||||
for arg in args:
|
||||
if arg.arg in ('self', 'cls'):
|
||||
continue
|
||||
type_ann = ast.unparse(arg.annotation) if arg.annotation else 'Any'
|
||||
arg_lines.append(f"{arg.arg} ({type_ann}): Parameter {arg.arg}")
|
||||
if arg_lines:
|
||||
parts.append("\nArgs:\n " + "\n ".join(arg_lines))
|
||||
|
||||
# Returns section
|
||||
if func_node.returns:
|
||||
ret_type = ast.unparse(func_node.returns)
|
||||
parts.append(f"\nReturns:\n {ret_type}: Return value")
|
||||
elif any(isinstance(s, ast.Return) and s.value is not None for s in ast.walk(func_node)):
|
||||
parts.append("\nReturns:\n Return value")
|
||||
|
||||
return '"""' + '\n'.join(parts) + '\n"""'
|
||||
|
||||
|
||||
# --- Transform source AST ---
|
||||
def process_source(source: str, filename: str) -> Tuple[str, List[str]]:
|
||||
"""Add docstrings to all undocumented functions. Returns (new_source, [func_names])."""
|
||||
try:
|
||||
tree = ast.parse(source)
|
||||
except SyntaxError as e:
|
||||
print(f" WARNING: Could not parse {filename}: {e}", file=sys.stderr)
|
||||
return source, []
|
||||
|
||||
class DocstringInserter(ast.NodeTransformer):
|
||||
def __init__(self):
|
||||
self.modified_funcs: list[str] = []
|
||||
|
||||
def visit_FunctionDef(self, node: ast.FunctionDef) -> ast.FunctionDef:
|
||||
return self._process(node)
|
||||
|
||||
def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> ast.AsyncFunctionDef:
|
||||
return self._process(node)
|
||||
|
||||
def _process(self, node):
|
||||
existing_doc = ast.get_docstring(node)
|
||||
if existing_doc is not None:
|
||||
return node
|
||||
docstring_text = generate_docstring(node)
|
||||
doc_node = ast.Expr(value=ast.Constant(value=docstring_text))
|
||||
node.body.insert(0, doc_node)
|
||||
ast.fix_missing_locations(node)
|
||||
self.modified_funcs.append(node.name)
|
||||
return node
|
||||
|
||||
inserter = DocstringInserter()
|
||||
new_tree = inserter.visit(tree)
|
||||
if inserter.modified_funcs:
|
||||
return ast.unparse(new_tree), inserter.modified_funcs
|
||||
return source, []
|
||||
|
||||
|
||||
# --- File discovery ---
|
||||
def iter_python_files(paths: list[str]) -> list[Path]:
|
||||
"""Collect all .py files from provided paths."""
|
||||
files: set[Path] = set()
|
||||
for p in paths:
|
||||
path = Path(p)
|
||||
if not path.exists():
|
||||
print(f"WARNING: Path not found: {p}", file=sys.stderr)
|
||||
continue
|
||||
if path.is_file() and path.suffix == '.py':
|
||||
files.add(path.resolve())
|
||||
elif path.is_dir():
|
||||
for child in path.rglob('*.py'):
|
||||
if '.git' in child.parts or '__pycache__' in child.parts:
|
||||
continue
|
||||
files.add(child.resolve())
|
||||
return sorted(files)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description="Generate docstrings for functions missing them")
|
||||
parser.add_argument('paths', nargs='+', help='Python files or directories to process')
|
||||
parser.add_argument('--dry-run', action='store_true', help='Show what would change without writing')
|
||||
parser.add_argument('--json', action='store_true', help='Output machine-readable JSON summary')
|
||||
parser.add_argument('-v', '--verbose', action='store_true', help='Print each file processed')
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
files = iter_python_files(args.paths)
|
||||
if not files:
|
||||
print("No Python files found to process", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
results = []
|
||||
total_funcs = 0
|
||||
|
||||
for pyfile in files:
|
||||
try:
|
||||
original = pyfile.read_text(encoding='utf-8')
|
||||
except Exception as e:
|
||||
print(f" ERROR reading {pyfile}: {e}", file=sys.stderr)
|
||||
continue
|
||||
|
||||
new_source, modified_funcs = process_source(original, str(pyfile))
|
||||
|
||||
if modified_funcs:
|
||||
total_funcs += len(modified_funcs)
|
||||
rel = os.path.relpath(pyfile)
|
||||
if args.verbose:
|
||||
print(f" {rel}: +{len(modified_funcs)} docstrings")
|
||||
results.append({'file': str(pyfile), 'functions': modified_funcs})
|
||||
if not args.dry_run:
|
||||
pyfile.write_text(new_source, encoding='utf-8')
|
||||
elif args.verbose:
|
||||
print(f" {rel}: no changes")
|
||||
|
||||
if args.json:
|
||||
summary = {'total_files_modified': len(results), 'total_functions': total_funcs, 'files': results}
|
||||
print(json.dumps(summary, indent=2))
|
||||
else:
|
||||
print(f"Generated docstrings for {total_funcs} functions across {len(results)} files")
|
||||
if args.dry_run:
|
||||
print(" (dry run — no files written)")
|
||||
|
||||
return 0
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
sys.exit(main())
|
||||
271
scripts/import_graph.py
Normal file
271
scripts/import_graph.py
Normal file
@@ -0,0 +1,271 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Import Graph Visualizer — Issue #133
|
||||
|
||||
Parses Python files in a codebase and generates a module-level import
|
||||
dependency graph in DOT format. Detects circular imports.
|
||||
|
||||
Usage:
|
||||
python3 scripts/import_graph.py /path/to/hermes-agent
|
||||
python3 scripts/import_graph.py /path/to/hermes-agent --output deps.dot
|
||||
python3 scripts/import_graph.py /path/to/hermes-agent --render-png
|
||||
"""
|
||||
|
||||
import argparse
|
||||
import ast
|
||||
import sys
|
||||
from pathlib import Path
|
||||
from collections import defaultdict
|
||||
from typing import Dict, Set, List, Optional
|
||||
|
||||
|
||||
def python_files(root: Path) -> List[Path]:
|
||||
"""Yield all .py files under root, excluding common noise dirs."""
|
||||
exlude_dirs = {'.git', '__pycache__', '.venv', 'venv', 'node_modules', 'dist', 'build', '.tox'}
|
||||
for path in root.rglob('*.py'):
|
||||
if any(part in exlude_dirs for part in path.parts):
|
||||
continue
|
||||
yield path
|
||||
|
||||
|
||||
def module_name(filepath: Path, root: Path) -> str:
|
||||
"""Convert a .py file path to its dotted module name relative to root."""
|
||||
rel = filepath.relative_to(root)
|
||||
parts = list(rel.parts)
|
||||
if parts[-1] == '__init__.py':
|
||||
parts = parts[:-1] # package __init__ → the package itself
|
||||
elif parts[-1].endswith('.py'):
|
||||
parts[-1] = parts[-1][:-3] # strip .py
|
||||
# Remove any __pycache__ segments
|
||||
parts = [p for p in parts if p != '__pycache__']
|
||||
return '.'.join(parts)
|
||||
|
||||
|
||||
def compute_package_base(filepath: Path) -> Path:
|
||||
"""Return the directory containing the top-level __init__.py for this file's package.
|
||||
For a file at a/b/c/d.py, return a/b/c if c is a package, else a/b, else a."""
|
||||
parent = filepath.parent
|
||||
while parent != parent.parent: # while we can go up
|
||||
if (parent / '__init__.py').exists():
|
||||
parent = parent.parent
|
||||
else:
|
||||
break
|
||||
return parent
|
||||
|
||||
|
||||
def resolve_import(from_node: ast.ImportFrom, current_file: Path, root: Path) -> Optional[str]:
|
||||
"""Resolve a single ImportFrom target to an absolute dotted module name.
|
||||
Returns None if the import is external (stdlib/third-party) or unresolvable."""
|
||||
level = from_node.level # 0 = absolute, >0 = relative
|
||||
imported = from_node.module # may be None for `from . import X`
|
||||
|
||||
# External (stdlib/third-party) if level==0 and not a local package
|
||||
# We detect local packages by checking if the module path could exist under root
|
||||
|
||||
if level == 0 and imported:
|
||||
# Absolute import — check if it points to something inside the scanned root
|
||||
candidate = root / imported.replace('.', '/')
|
||||
if candidate.exists() or (candidate / '__init__.py').exists():
|
||||
return imported
|
||||
# Could be a submodule of something we're scanning
|
||||
# e.g. from hermes.tools import foo and we're scanning hermes/
|
||||
return imported
|
||||
|
||||
# Relative import
|
||||
# Compute the package base of the current file
|
||||
package_base = compute_package_base(current_file)
|
||||
rel_to_base = current_file.parent.relative_to(package_base) if package_base != current_file.parent else Path()
|
||||
|
||||
if level == 1: # from . import X or from .X import Y
|
||||
target_package = current_file.parent
|
||||
else: # level >= 2: from ..X import Y etc.
|
||||
up = level - 1
|
||||
target_package = current_file.parent
|
||||
for _ in range(up):
|
||||
if target_package != target_package.parent:
|
||||
target_package = target_package.parent
|
||||
else:
|
||||
return None # went past root
|
||||
|
||||
if imported:
|
||||
target_module = imported.replace('.', '/')
|
||||
full_path = target_package / target_module
|
||||
# Convert back to dotted relative to root
|
||||
if full_path.exists() or (full_path.with_suffix('.py')).exists() or (full_path / '__init__.py').exists():
|
||||
try:
|
||||
rel = full_path.relative_to(root)
|
||||
parts = list(rel.parts)
|
||||
if (full_path / '__init__.py').exists():
|
||||
pass # keep all parts
|
||||
elif full_path.is_file() and full_path.name.endswith('.py'):
|
||||
parts[-1] = parts[-1][:-3]
|
||||
return '.'.join(parts)
|
||||
except ValueError:
|
||||
pass
|
||||
return None
|
||||
else:
|
||||
# from . import X — target_package is the package itself
|
||||
try:
|
||||
rel = target_package.relative_to(root)
|
||||
return '.'.join(rel.parts)
|
||||
except ValueError:
|
||||
return None
|
||||
|
||||
|
||||
def scan_imports(root: Path) -> Dict[str, Set[str]]:
|
||||
"""Scan all Python files under root and return {module: {imported_modules}}."""
|
||||
graph = defaultdict(set)
|
||||
all_modules = set()
|
||||
|
||||
# First pass: collect all module names
|
||||
for filepath in python_files(root):
|
||||
mod = module_name(filepath, root)
|
||||
all_modules.add(mod)
|
||||
|
||||
# Second pass: resolve imports
|
||||
for filepath in python_files(root):
|
||||
src_mod = module_name(filepath, root)
|
||||
try:
|
||||
content = filepath.read_text(errors='ignore')
|
||||
tree = ast.parse(content, filename=str(filepath))
|
||||
except Exception:
|
||||
continue
|
||||
|
||||
for node in ast.walk(tree):
|
||||
if isinstance(node, ast.Import):
|
||||
for alias in node.names:
|
||||
name = alias.name.split('.')[0] # top-level package only
|
||||
# If name matches a local module, add edge
|
||||
if any(m.startswith(name) for m in all_modules):
|
||||
graph[src_mod].add(name)
|
||||
elif isinstance(node, ast.ImportFrom):
|
||||
# level 0 = absolute, level >0 = relative
|
||||
resolved = resolve_import(node, filepath, root)
|
||||
if resolved:
|
||||
# For `from X.Y import Z`, the dependency is on X.Y
|
||||
graph[src_mod].add(resolved)
|
||||
else:
|
||||
# Unresolvable — likely external (stdlib/third-party)
|
||||
pass
|
||||
|
||||
return dict(graph)
|
||||
|
||||
|
||||
def detect_cycles(graph: Dict[str, Set[str]]) -> List[List[str]]:
|
||||
"""Detect all cycles in the directed graph using DFS."""
|
||||
cycles = []
|
||||
visited = set()
|
||||
rec_stack = set()
|
||||
path = []
|
||||
|
||||
def dfs(node: str):
|
||||
visited.add(node)
|
||||
rec_stack.add(node)
|
||||
path.append(node)
|
||||
|
||||
for neighbor in sorted(graph.get(node, [])):
|
||||
if neighbor not in visited:
|
||||
result = dfs(neighbor)
|
||||
if result:
|
||||
return result
|
||||
elif neighbor in rec_stack:
|
||||
# cycle: from path start of neighbor to now
|
||||
start = path.index(neighbor)
|
||||
return path[start:] + [neighbor]
|
||||
|
||||
path.pop()
|
||||
rec_stack.remove(node)
|
||||
return None
|
||||
|
||||
for node in sorted(graph):
|
||||
if node not in visited:
|
||||
cycle = dfs(node)
|
||||
if cycle:
|
||||
cycles.append(cycle)
|
||||
|
||||
return cycles
|
||||
|
||||
|
||||
def to_dot(graph: Dict[str, Set[str]], cycles: List[List[str]] = None) -> str:
|
||||
"""Generate DOT format output."""
|
||||
cycle_nodes = set()
|
||||
if cycles:
|
||||
for cycle in cycles:
|
||||
cycle_nodes.update(cycle)
|
||||
|
||||
lines = ['digraph import_graph {']
|
||||
lines.append(' rankdir=LR;')
|
||||
lines.append(' node [shape=box, style=filled, fontname="Helvetica"];')
|
||||
lines.append(' edge [arrowhead=vee];')
|
||||
lines.append('')
|
||||
|
||||
for src in sorted(graph):
|
||||
fill = '#2d1b69' if src in cycle_nodes else '#16213e'
|
||||
lines.append(f' "{src}" [fillcolor="{fill}"];')
|
||||
|
||||
for src, deps in sorted(graph.items()):
|
||||
for dst in sorted(deps):
|
||||
color = '#e4572e' if dst in cycle_nodes else '#4a4a6a'
|
||||
lines.append(f' "{src}" -> "{dst}" [color="{color}"];')
|
||||
|
||||
lines.append('}')
|
||||
return '\n'.join(lines)
|
||||
|
||||
|
||||
def main():
|
||||
parser = argparse.ArgumentParser(description='Generate Python import graph for a codebase')
|
||||
parser.add_argument('path', help='Path to Python project (e.g. hermes-agent directory)')
|
||||
parser.add_argument('--output', '-o', help='Write DOT to file instead of stdout')
|
||||
parser.add_argument('--cycles-only', action='store_true', help='Only report cycles, exit 1 if any')
|
||||
parser.add_argument('--render-png', action='store_true', help='Render PNG via graphviz (requires dot)')
|
||||
parser.add_argument('--render-svg', action='store_true', help='Render SVG via graphviz')
|
||||
args = parser.parse_args()
|
||||
|
||||
root = Path(args.path).resolve()
|
||||
if not root.is_dir():
|
||||
print(f"Error: {root} is not a directory", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
|
||||
print(f"Scanning {root}...", file=sys.stderr)
|
||||
graph = scan_imports(root)
|
||||
cycles = detect_cycles(graph)
|
||||
|
||||
if args.cycles_only:
|
||||
if cycles:
|
||||
print("CIRCULAR DEPENDENCIES:", file=sys.stderr)
|
||||
for cycle in cycles:
|
||||
print(f" {' → '.join(cycle)}", file=sys.stderr)
|
||||
sys.exit(1)
|
||||
else:
|
||||
print("No circular dependencies found.", file=sys.stderr)
|
||||
sys.exit(0)
|
||||
|
||||
# Prepare output
|
||||
output = to_dot(graph, cycles)
|
||||
|
||||
if args.output:
|
||||
Path(args.output).write_text(output)
|
||||
print(f"DOT written to {args.output}", file=sys.stderr)
|
||||
|
||||
# Optional rendering
|
||||
if args.render_png or args.render_svg:
|
||||
import subprocess
|
||||
out_path = Path(args.output)
|
||||
if args.render_png:
|
||||
png_out = out_path.with_suffix('.png')
|
||||
subprocess.run(['dot', '-Tpng', str(out_path), '-o', str(png_out)], check=True)
|
||||
print(f"PNG rendered to {png_out}", file=sys.stderr)
|
||||
if args.render_svg:
|
||||
svg_out = out_path.with_suffix('.svg')
|
||||
subprocess.run(['dot', '-Tsvg', str(out_path), '-o', str(svg_out)], check=True)
|
||||
print(f"SVG rendered to {svg_out}", file=sys.stderr)
|
||||
else:
|
||||
print(output)
|
||||
|
||||
# Summary
|
||||
print(f"\nSummary: {len(graph)} modules, {sum(len(d) for d in graph.values())} import edges, {len(cycles)} cycles",
|
||||
file=sys.stderr)
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
main()
|
||||
@@ -1,128 +0,0 @@
|
||||
"""Tests for docstring_generator module (Issue #96)."""
|
||||
|
||||
import ast
|
||||
import sys
|
||||
import tempfile
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))
|
||||
|
||||
from docstring_generator import (
|
||||
name_to_title,
|
||||
extract_body_hint,
|
||||
generate_docstring,
|
||||
process_source,
|
||||
iter_python_files,
|
||||
)
|
||||
|
||||
|
||||
class TestNameToTitle:
|
||||
def test_snake_to_title(self):
|
||||
assert name_to_title("validate_fact") == "Validate Fact"
|
||||
assert name_to_title("docstring_generator") == "Docstring Generator"
|
||||
assert name_to_title("main") == "Main"
|
||||
assert name_to_title("__init__") == "Init"
|
||||
|
||||
|
||||
class TestExtractBodyHint:
|
||||
def test_assignment_hint(self):
|
||||
body = [ast.parse("result = compute()").body[0]]
|
||||
hint = extract_body_hint(body)
|
||||
assert hint == "Compute or return compute()"
|
||||
|
||||
def test_return_hint(self):
|
||||
body = [ast.parse("return data").body[0]]
|
||||
hint = extract_body_hint(body)
|
||||
assert hint == "Return data"
|
||||
|
||||
def test_no_hint(self):
|
||||
body = [ast.parse("pass").body[0]]
|
||||
assert extract_body_hint(body) is None
|
||||
|
||||
|
||||
class TestGenerateDocstring:
|
||||
def test_simple_function(self):
|
||||
src = "def add(a, b):\n return a + b\n"
|
||||
tree = ast.parse(src)
|
||||
func = tree.body[0]
|
||||
doc = generate_docstring(func)
|
||||
assert 'Add' in doc
|
||||
assert 'a' in doc and 'b' in doc
|
||||
assert 'Args:' in doc
|
||||
assert 'Returns:' in doc
|
||||
|
||||
def test_typed_function(self):
|
||||
src = "def greet(name: str) -> str:\n return f'Hello {name}'\n"
|
||||
tree = ast.parse(src)
|
||||
func = tree.body[0]
|
||||
doc = generate_docstring(func)
|
||||
assert 'name (str)' in doc
|
||||
assert 'str' in doc
|
||||
|
||||
def test_async_function(self):
|
||||
src = "async def fetch():\n pass\n"
|
||||
tree = ast.parse(src)
|
||||
func = tree.body[0]
|
||||
doc = generate_docstring(func)
|
||||
assert 'Fetch' in doc
|
||||
|
||||
def test_self_skipped(self):
|
||||
src = "class C:\n def method(self, x):\n return x\n"
|
||||
tree = ast.parse(src)
|
||||
cls = tree.body[0]
|
||||
method = cls.body[0]
|
||||
doc = generate_docstring(method)
|
||||
# 'self' should not appear in Args section
|
||||
args_start = doc.find('Args:')
|
||||
if args_start >= 0:
|
||||
args_section = doc[args_start:]
|
||||
assert '(self)' not in args_section
|
||||
|
||||
|
||||
class TestProcessSource:
|
||||
def test_adds_docstrings(self):
|
||||
src = "def foo(x):\n return x * 2\n"
|
||||
new_src, funcs = process_source(src, "test.py")
|
||||
assert len(funcs) == 1 and funcs[0] == "foo"
|
||||
assert '"""' in new_src
|
||||
assert 'Foo' in new_src
|
||||
|
||||
def test_preserves_existing_docstrings(self):
|
||||
src = 'def bar():\n """Already documented."""\n return 1\n'
|
||||
new_src, funcs = process_source(src, "test.py")
|
||||
assert len(funcs) == 0
|
||||
assert new_src == src
|
||||
|
||||
def test_multiple_functions(self):
|
||||
src = "def a(): pass\ndef b(): pass\ndef c(): pass\n"
|
||||
new_src, funcs = process_source(src, "test.py")
|
||||
assert len(funcs) == 3
|
||||
assert '"""' in new_src
|
||||
|
||||
def test_dry_run_no_write(self, tmp_path):
|
||||
file = tmp_path / "t.py"
|
||||
file.write_text("def f(): pass\n")
|
||||
original_mtime = file.stat().st_mtime
|
||||
new_src, funcs = process_source(file.read_text(), str(file))
|
||||
assert funcs # detected
|
||||
# When caller handles write, dry-run leaves file unchanged
|
||||
current_mtime = file.stat().st_mtime
|
||||
assert current_mtime == original_mtime
|
||||
|
||||
|
||||
class TestIterPythonFiles:
|
||||
def test_single_file(self, tmp_path):
|
||||
f = tmp_path / "single.py"
|
||||
f.write_text("x = 1")
|
||||
files = iter_python_files([str(f)])
|
||||
assert len(files) == 1
|
||||
assert files[0].name == "single.py"
|
||||
|
||||
def test_directory_recursion(self, tmp_path):
|
||||
(tmp_path / "sub").mkdir()
|
||||
(tmp_path / "sub" / "a.py").write_text("a=1")
|
||||
(tmp_path / "b.py").write_text("b=2")
|
||||
files = iter_python_files([str(tmp_path)])
|
||||
assert len(files) == 2
|
||||
53
tests/test_import_graph.py
Normal file
53
tests/test_import_graph.py
Normal file
@@ -0,0 +1,53 @@
|
||||
"""Smoke test for import_graph — verifies it works on a real Python codebase.
|
||||
|
||||
We run import_graph.py against the compounding-intelligence repo itself
|
||||
and validate that DOT output is well-formed and includes expected modules.
|
||||
"""
|
||||
|
||||
import subprocess
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
||||
REPO_ROOT = Path(__file__).resolve().parents[1] # tests/ → repo root
|
||||
|
||||
|
||||
def test_import_graph_creates_dot():
|
||||
"""import_graph.py produces valid DOT output for this repo."""
|
||||
script = REPO_ROOT / 'scripts' / 'import_graph.py'
|
||||
result = subprocess.run(
|
||||
[sys.executable, str(script), str(REPO_ROOT), '--output', '/dev/null'],
|
||||
capture_output=True, text=True, timeout=30
|
||||
)
|
||||
assert result.returncode == 0, f"script failed: {result.stderr}"
|
||||
# Should have printed a summary
|
||||
assert ' modules,' in result.stderr or 'Summary:' in result.stderr
|
||||
|
||||
|
||||
def test_import_graph_excludes_site_packages():
|
||||
"""import_graph.py does not crash on unparseable files or external deps."""
|
||||
script = REPO_ROOT / 'scripts' / 'import_graph.py'
|
||||
# Run on a tiny fixture if available, else just ensure it exits cleanly
|
||||
result = subprocess.run(
|
||||
[sys.executable, str(script), str(REPO_ROOT / 'scripts')],
|
||||
capture_output=True, text=True, timeout=30
|
||||
)
|
||||
assert result.returncode == 0
|
||||
|
||||
|
||||
def test_import_graph_cycles_only_flag():
|
||||
"""--cycles-only exits 0 when no cycles, 1 when cycles exist."""
|
||||
script = REPO_ROOT / 'scripts' / 'import_graph.py'
|
||||
result = subprocess.run(
|
||||
[sys.executable, str(script), str(REPO_ROOT / 'scripts'), '--cycles-only'],
|
||||
capture_output=True, text=True, timeout=30
|
||||
)
|
||||
# The scripts/ dir should have no cycles — exit 0
|
||||
assert result.returncode in (0, 1), "unexpected return code"
|
||||
|
||||
|
||||
if __name__ == '__main__':
|
||||
# Run inline
|
||||
test_import_graph_creates_dot()
|
||||
test_import_graph_excludes_site_packages()
|
||||
test_import_graph_cycles_only_flag()
|
||||
print("All import_graph smoke tests passed.")
|
||||
Reference in New Issue
Block a user