Compare commits
1 Commits
step35/96-
...
step35/151
| Author | SHA1 | Date |
|---|---|---|
| | 5f6a7f7265 | |
@@ -1,203 +0,0 @@
#!/usr/bin/env python3
"""
Docstring Generator — find and add missing docstrings.

Scans Python files for functions/async functions lacking docstrings.
Generates Google-style docstrings from function signature and body.
Inserts them in place.

Usage:
    python3 docstring_generator.py scripts/ # Fix in place
    python3 docstring_generator.py --dry-run scripts/ # Preview changes
    python3 docstring_generator.py --json scripts/ # Machine-readable output
    python3 docstring_generator.py path/to/file.py
"""

import argparse
import ast
import json
import os
import sys
from pathlib import Path
from typing import Optional, Tuple, List


# --- Helper: turn snake_case into Title Case phrase ---
def name_to_title(name: str) -> str:
    """Convert snake_case function name to a Title Case description."""
    words = name.replace('_', ' ').split()
    if not words:
        return ''
    titled = []
    for w in words:
        if len(w) <= 2:
            titled.append(w.upper())
        else:
            titled.append(w[0].upper() + w[1:])
    return ' '.join(titled)


# --- Helper: extract first meaningful statement from body for summary ---
def extract_body_hint(body: list[ast.stmt]) -> Optional[str]:
    """Look for an assignment or return that hints at function purpose."""
    for stmt in body:
        if isinstance(stmt, ast.Expr) and isinstance(stmt.value, ast.Constant):
            continue  # skip existing docstring placeholder
        # Assignment to a result-like variable?
        if isinstance(stmt, ast.Assign):
            for target in stmt.targets:
                if isinstance(target, ast.Name):
                    var_name = target.id
                    if var_name in ('result', 'msg', 'output', 'retval', 'value', 'response', 'data'):
                        val = ast.unparse(stmt.value).strip()
                        if val:
                            return f"Compute or return {val}"
        # Return statement
        if isinstance(stmt, ast.Return) and stmt.value:
            ret = ast.unparse(stmt.value).strip()
            if ret:
                return f"Return {ret}"
        break
    return None


# --- Generate a docstring string for a function ---
def generate_docstring(func_node: ast.FunctionDef | ast.AsyncFunctionDef) -> str:
    """Build a Google-style docstring for the given function node."""
    parts: list[str] = []

    # Summary line
    summary = name_to_title(func_node.name)
    body_hint = extract_body_hint(func_node.body)
    if body_hint:
        summary = f"{summary}. {body_hint}"
    parts.append(summary)

    # Args section if there are parameters (excluding self/cls)
    args = func_node.args.args
    if args:
        arg_lines = []
        for arg in args:
            if arg.arg in ('self', 'cls'):
                continue
            type_ann = ast.unparse(arg.annotation) if arg.annotation else 'Any'
            arg_lines.append(f"{arg.arg} ({type_ann}): Parameter {arg.arg}")
        if arg_lines:
            parts.append("\nArgs:\n " + "\n ".join(arg_lines))

    # Returns section
    if func_node.returns:
        ret_type = ast.unparse(func_node.returns)
        parts.append(f"\nReturns:\n {ret_type}: Return value")
    elif any(isinstance(s, ast.Return) and s.value is not None for s in ast.walk(func_node)):
        parts.append("\nReturns:\n Return value")

    return '"""' + '\n'.join(parts) + '\n"""'


# --- Transform source AST ---
def process_source(source: str, filename: str) -> Tuple[str, List[str]]:
    """Add docstrings to all undocumented functions. Returns (new_source, [func_names])."""
    try:
        tree = ast.parse(source)
    except SyntaxError as e:
        print(f" WARNING: Could not parse {filename}: {e}", file=sys.stderr)
        return source, []

    class DocstringInserter(ast.NodeTransformer):
        def __init__(self):
            self.modified_funcs: list[str] = []

        def visit_FunctionDef(self, node: ast.FunctionDef) -> ast.FunctionDef:
            return self._process(node)

        def visit_AsyncFunctionDef(self, node: ast.AsyncFunctionDef) -> ast.AsyncFunctionDef:
            return self._process(node)

        def _process(self, node):
            existing_doc = ast.get_docstring(node)
            if existing_doc is not None:
                return node
            docstring_text = generate_docstring(node)
            doc_node = ast.Expr(value=ast.Constant(value=docstring_text))
            node.body.insert(0, doc_node)
            ast.fix_missing_locations(node)
            self.modified_funcs.append(node.name)
            return node

    inserter = DocstringInserter()
    new_tree = inserter.visit(tree)
    if inserter.modified_funcs:
        return ast.unparse(new_tree), inserter.modified_funcs
    return source, []


# --- File discovery ---
def iter_python_files(paths: list[str]) -> list[Path]:
    """Collect all .py files from provided paths."""
    files: set[Path] = set()
    for p in paths:
        path = Path(p)
        if not path.exists():
            print(f"WARNING: Path not found: {p}", file=sys.stderr)
            continue
        if path.is_file() and path.suffix == '.py':
            files.add(path.resolve())
        elif path.is_dir():
            for child in path.rglob('*.py'):
                if '.git' in child.parts or '__pycache__' in child.parts:
                    continue
                files.add(child.resolve())
    return sorted(files)


def main():
    parser = argparse.ArgumentParser(description="Generate docstrings for functions missing them")
    parser.add_argument('paths', nargs='+', help='Python files or directories to process')
    parser.add_argument('--dry-run', action='store_true', help='Show what would change without writing')
    parser.add_argument('--json', action='store_true', help='Output machine-readable JSON summary')
    parser.add_argument('-v', '--verbose', action='store_true', help='Print each file processed')

    args = parser.parse_args()

    files = iter_python_files(args.paths)
    if not files:
        print("No Python files found to process", file=sys.stderr)
        sys.exit(1)

    results = []
    total_funcs = 0

    for pyfile in files:
        try:
            original = pyfile.read_text(encoding='utf-8')
        except Exception as e:
            print(f" ERROR reading {pyfile}: {e}", file=sys.stderr)
            continue

        new_source, modified_funcs = process_source(original, str(pyfile))

        if modified_funcs:
            total_funcs += len(modified_funcs)
            rel = os.path.relpath(pyfile)
            if args.verbose:
                print(f" {rel}: +{len(modified_funcs)} docstrings")
            results.append({'file': str(pyfile), 'functions': modified_funcs})
            if not args.dry_run:
                pyfile.write_text(new_source, encoding='utf-8')
        elif args.verbose:
            print(f" {rel}: no changes")

    if args.json:
        summary = {'total_files_modified': len(results), 'total_functions': total_funcs, 'files': results}
        print(json.dumps(summary, indent=2))
    else:
        print(f"Generated docstrings for {total_funcs} functions across {len(results)} files")
        if args.dry_run:
            print(" (dry run — no files written)")

    return 0


if __name__ == '__main__':
    sys.exit(main())
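For a sense of what the removed generator did, the following is a minimal sketch (not part of this commit; the example function and file name are invented, and it assumes the scripts/ directory is on sys.path, as the old tests arranged):

# Editor's sketch, illustrative only: exercises process_source as defined in the removed file.
import docstring_generator as dg

src = "def load_config(path: str) -> dict:\n    return json.loads(path)\n"
new_src, funcs = dg.process_source(src, "example.py")
# funcs == ['load_config']; new_src now begins the function body with a generated
# Google-style docstring (summary from the name, Args and Returns from the signature).
print(new_src)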
206
scripts/graph_visualizer.py
Executable file
@@ -0,0 +1,206 @@
#!/usr/bin/env python3
"""
graph_visualizer.py — Generate visual graph representations of the knowledge graph.

Reads knowledge/index.json and renders the fact relationship graph.
Supports ASCII terminal output and DOT export for Graphviz.

Usage:
    python3 scripts/graph_visualizer.py # ASCII, all nodes
    python3 scripts/graph_visualizer.py --format dot # DOT output
    python3 scripts/graph_visualizer.py --seed root --max-depth 2
    python3 scripts/graph_visualizer.py --filter-domain hermes-agent
    python3 scripts/graph_visualizer.py --filter-category pitfall

Acceptance: [x] Subgraph extraction [x] ASCII rendering [x] DOT export [x] Configurable depth/filter
"""

import argparse
import json
import sys
from collections import defaultdict, deque
from pathlib import Path
from typing import Optional


def load_index(index_path: Path):
    with open(index_path) as f:
        return json.load(f)


def build_adjacency(facts):
    adj = defaultdict(list)
    all_ids = {f['id'] for f in facts if 'id' in f}
    for f in facts:
        fid = f.get('id')
        if not fid:
            continue
        for rel in f.get('related', []):
            if rel in all_ids:
                adj[fid].append(rel)
    return dict(adj)


def build_reverse_adjacency(adj):
    rev = defaultdict(list)
    for src, targets in adj.items():
        for tgt in targets:
            rev[tgt].append(src)
    return dict(rev)


def extract_subgraph(
    facts,
    adj,
    rev_adj,
    seeds=None,
    max_depth=None,
    filter_domain=None,
    filter_category=None,
):
    filtered_nodes = set()
    for f in facts:
        fid = f.get('id')
        if not fid:
            continue
        if filter_domain and f.get('domain') != filter_domain:
            continue
        if filter_category and f.get('category') != filter_category:
            continue
        filtered_nodes.add(fid)

    if seeds is None:
        return filtered_nodes if filtered_nodes else {f['id'] for f in facts if 'id' in f}

    valid_seeds = [s for s in seeds if s in filtered_nodes]
    if not valid_seeds:
        return set()

    visited = set()
    queue = deque([(s, 0) for s in valid_seeds])
    while queue:
        node, depth = queue.popleft()
        if node in visited or node not in filtered_nodes:
            continue
        visited.add(node)
        if max_depth is not None and depth >= max_depth:
            continue
        for neighbor in adj.get(node, []):
            if neighbor in filtered_nodes and neighbor not in visited:
                queue.append((neighbor, depth + 1))
        for neighbor in rev_adj.get(node, []):
            if neighbor in filtered_nodes and neighbor not in visited:
                queue.append((neighbor, depth + 1))
    return visited


def build_fact_map(facts):
    return {f['id']: f for f in facts if 'id' in f and 'fact' in f}


def render_ascii(subgraph_ids, adj, fact_map):
    lines = []
    visited = set()
    inorder = []
    from collections import deque
    queue = deque()
    inbound = defaultdict(int)
    for src in subgraph_ids:
        for tgt in adj.get(src, []):
            if tgt in subgraph_ids:
                inbound[tgt] += 1
    roots = [n for n in sorted(subgraph_ids) if inbound.get(n, 0) == 0]
    if not roots:
        roots = sorted(subgraph_ids)
    for root in roots:
        queue.append((root, 0, None))
    while queue:
        node, depth, parent_label = queue.popleft()
        if node in visited:
            continue
        visited.add(node)
        fact = fact_map.get(node, {})
        label = fact.get('fact', str(node))[:80]
        category = fact.get('category', 'fact')
        domain = fact.get('domain', 'global')
        node_label = domain + '/' + category + ': ' + label
        if parent_label is None:
            lines.append(f"{' ' * depth}┌─ {node_label}")
        else:
            lines.append(f"{' ' * depth}├─ {node_label}")
        children = [c for c in adj.get(node, []) if c in subgraph_ids]
        for i, child in enumerate(children):
            queue.append((child, depth + 1, node))
    if len(visited) < len(subgraph_ids):
        lines.append("\n[Disconnected nodes — not in traversal order:]")
        for n in sorted(subgraph_ids - visited):
            fact = fact_map.get(n, {})
            label = fact.get('fact', n)[:60]
            lines.append(f" {n} — {label}")
    return "\n".join(lines)


def render_dot(subgraph_ids, adj, fact_map):
    lines = ["digraph knowledge_graph {", " rankdir=LR;"]
    cat_colors = {
        'fact': '#3498db',
        'pitfall': '#e74c3c',
        'pattern': '#2ecc71',
        'tool-quirk': '#f39c12',
        'question': '#9b59b6',
    }
    for nid in sorted(subgraph_ids):
        fact = fact_map.get(nid, {})
        category = fact.get('category', 'fact')
        domain = fact.get('domain', 'global')
        label = fact.get('fact', nid).replace('"', '\\"')[:80]
        fillcolor = cat_colors.get(category, '#666666')
        lines.append(f' "{nid}" [label="{domain}\\n{category}\\n{label}", fillcolor="{fillcolor}", style=filled, shape=box];')
    lines.append("")
    for src in sorted(subgraph_ids):
        for tgt in adj.get(src, []):
            if tgt in subgraph_ids:
                lines.append(f' "{src}" -> "{tgt}";')
    lines.append("}")
    return "\n".join(lines)


def main():
    parser = argparse.ArgumentParser(description="Visualize the knowledge graph (ASCII terminal or DOT for Graphviz).")
    parser.add_argument("--index", type=Path, default=Path(__file__).parent.parent / "knowledge" / "index.json",
                        help="Path to knowledge/index.json")
    parser.add_argument("--format", choices=["ascii", "dot"], default="ascii",
                        help="Output format (default: ascii)")
    parser.add_argument("--output", "-o", type=Path, help="Write output to file (default: stdout)")
    parser.add_argument("--seed", help="Starting fact ID (comma-sep). Omit to render full graph.")
    parser.add_argument("--max-depth", type=int, help="Max traversal depth from seed nodes (requires --seed).")
    parser.add_argument("--filter-domain", help="Only include facts from this domain.")
    parser.add_argument("--filter-category", help="Only include facts of this category.")
    args = parser.parse_args()

    index = load_index(args.index)
    facts = index.get('facts', [])
    adj = build_adjacency(facts)
    rev_adj = build_reverse_adjacency(adj)
    fact_map = build_fact_map(facts)
    seeds = args.seed.split(',') if args.seed else None
    subgraph_ids = extract_subgraph(facts=facts, adj=adj, rev_adj=rev_adj, seeds=seeds,
                                    max_depth=args.max_depth,
                                    filter_domain=args.filter_domain,
                                    filter_category=args.filter_category)
    if not subgraph_ids:
        print("No nodes match the specified filters.", file=sys.stderr)
        sys.exit(1)
    if args.format == "ascii":
        output = render_ascii(subgraph_ids, adj, fact_map)
    else:
        output = render_dot(subgraph_ids, adj, fact_map)
    if args.output:
        args.output.write_text(output)
        print(f"Written: {args.output}", file=sys.stderr)
    else:
        print(output)


if __name__ == "__main__":
    main()
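The script documents the index path but not its shape. Based on the fields it reads (id, fact, domain, category, related) and the wrapper used in the test suite below, a minimal knowledge/index.json could look like this sketch; all IDs, domains, and fact text are invented for illustration:

# Editor's sketch: write a minimal index in the shape graph_visualizer.py expects.
import json
from pathlib import Path

index = {
    "version": 1,
    "total_facts": 2,
    "facts": [
        {"id": "fact-001", "fact": "Root example fact", "domain": "hermes-agent",
         "category": "fact", "related": ["fact-002"]},
        {"id": "fact-002", "fact": "Example pitfall linked to the root", "domain": "hermes-agent",
         "category": "pitfall", "related": []},
    ],
}
Path("knowledge").mkdir(exist_ok=True)
Path("knowledge/index.json").write_text(json.dumps(index, indent=2))
# Then render it, e.g.: python3 scripts/graph_visualizer.py --filter-category pitfall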
105
scripts/test_graph_visualizer.py
Executable file
@@ -0,0 +1,105 @@
#!/usr/bin/env python3
"""
Tests for graph_visualizer.py — smoke test + subgraph logic.
Run: python3 scripts/test_graph_visualizer.py
"""

import json, sys, tempfile
from pathlib import Path
sys.path.insert(0, str(Path(__file__).resolve().parent))
import graph_visualizer as gv


def make_index(facts, tmp_dir):
    p = tmp_dir / "index.json"
    p.write_text(json.dumps({"version": 1, "total_facts": len(facts), "facts": facts}, indent=2))
    return p


def test_build_adjacency_simple():
    facts = [{"id": "a", "related": ["b", "c"]}, {"id": "b", "related": ["c"]}, {"id": "c", "related": []}]
    adj = gv.build_adjacency(facts)
    assert adj == {"a": ["b", "c"], "b": ["c"]}
    print(" PASS: build_adjacency simple")


def test_build_adjacency_unknown_nodes():
    facts = [{"id": "a", "related": ["x", "b"]}, {"id": "b", "related": []}]
    adj = gv.build_adjacency(facts)
    assert adj == {"a": ["b"]}
    print(" PASS: build_adjacency filters unknown nodes")


def test_extract_subgraph_seed_only():
    facts = [{"id": "a", "domain": "t", "category": "f"}, {"id": "b", "domain": "t", "category": "f"}, {"id": "c", "domain": "t", "category": "f"}]
    adj = {"a": ["b"], "b": ["c"], "c": []}
    rev_adj = gv.build_reverse_adjacency(adj)
    sub = gv.extract_subgraph(facts, adj, rev_adj, seeds=["a"])
    assert sub == {"a", "b", "c"}, f"got {sub}"
    print(" PASS: extract_subgraph with seed returns full reachable set")


def test_extract_subgraph_with_depth():
    facts = [{"id": "a", "domain": "t", "category": "f"}, {"id": "b", "domain": "t", "category": "f"}, {"id": "c", "domain": "t", "category": "f"}, {"id": "d", "domain": "t", "category": "f"}]
    adj = {"a": ["b"], "b": ["c"], "c": ["d"], "d": []}
    rev_adj = gv.build_reverse_adjacency(adj)
    sub = gv.extract_subgraph(facts, adj, rev_adj, seeds=["a"], max_depth=2)
    assert sub == {"a", "b", "c"}
    print(" PASS: extract_subgraph depth=2 includes up to depth 2")


def test_extract_subgraph_filter_domain():
    facts = [{"id": "a", "domain": "alpha", "category": "f"}, {"id": "b", "domain": "beta", "category": "f"}, {"id": "c", "domain": "alpha", "category": "f"}]
    sub = gv.extract_subgraph(facts, {}, {}, filter_domain="alpha")
    assert sub == {"a", "c"}
    print(" PASS: filter_domain works")


def test_extract_subgraph_filter_category():
    facts = [{"id": "a", "domain": "g", "category": "pitfall"}, {"id": "b", "domain": "g", "category": "fact"}, {"id": "c", "domain": "g", "category": "pitfall"}]
    sub = gv.extract_subgraph(facts, {}, {}, filter_category="pitfall")
    assert sub == {"a", "c"}
    print(" PASS: filter_category works")


def test_render_ascii_simple_chain():
    facts = [{"id": "a", "fact": "A", "domain": "t", "category": "f"}, {"id": "b", "fact": "B", "domain": "t", "category": "f"}, {"id": "c", "fact": "C", "domain": "t", "category": "f"}]
    adj = {"a": ["b"], "b": ["c"]}
    fact_map = gv.build_fact_map(facts)
    out = gv.render_ascii({"a", "b", "c"}, adj, fact_map)
    assert "A" in out and "B" in out and "C" in out
    print(" PASS: render_ascii simple chain")


def test_render_dot_simple():
    facts = [{"id": "x", "fact": "node x", "domain": "d1", "category": "fact"}, {"id": "y", "fact": "node y", "domain": "d2", "category": "pitfall"}]
    adj = {"x": ["y"]}
    fact_map = gv.build_fact_map(facts)
    out = gv.render_dot({"x", "y"}, adj, fact_map)
    assert 'digraph knowledge_graph' in out and '"x"' in out and '"y"' in out and '->' in out
    assert '#3498db' in out and '#e74c3c' in out
    print(" PASS: render_dot basic structure and colors")


def main():
    print("\n=== graph_visualizer test suite ===\n")
    passed = failed = 0
    tests = [test_build_adjacency_simple, test_build_adjacency_unknown_nodes, test_extract_subgraph_seed_only, test_extract_subgraph_with_depth,
             test_extract_subgraph_filter_domain, test_extract_subgraph_filter_category,
             test_render_ascii_simple_chain, test_render_dot_simple]
    for test in tests:
        try:
            test()
            passed += 1
        except AssertionError as e:
            print(f" FAIL: {test.__name__} — {e}")
            failed += 1
        except Exception as e:
            print(f" ERROR: {test.__name__} — {e}")
            failed += 1
    print(f"\n=== Results: {passed}/{passed+failed} passed, {failed} failed ===")
    return failed == 0


if __name__ == "__main__":
    sys.exit(0 if main() else 1)
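Beyond the assertions above, the same helpers support a quick interactive check. A hedged sketch, assuming it is run from the scripts/ directory so both modules import; the fact content is illustrative only:

# Editor's sketch: manual smoke check reusing make_index and the gv helpers above.
import tempfile
from pathlib import Path

import graph_visualizer as gv
from test_graph_visualizer import make_index

with tempfile.TemporaryDirectory() as d:
    facts = [
        {"id": "a", "fact": "root fact", "domain": "t", "category": "fact", "related": ["b"]},
        {"id": "b", "fact": "leaf fact", "domain": "t", "category": "fact", "related": []},
    ]
    index = gv.load_index(make_index(facts, Path(d)))
    adj = gv.build_adjacency(index["facts"])
    print(gv.render_ascii({"a", "b"}, adj, gv.build_fact_map(index["facts"])))
    # Expected shape: "t/fact: root fact" as the root, "t/fact: leaf fact" nested one level in.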
@@ -1,128 +0,0 @@
"""Tests for docstring_generator module (Issue #96)."""

import ast
import sys
import tempfile
from pathlib import Path

import pytest

sys.path.insert(0, str(Path(__file__).parent.parent / "scripts"))

from docstring_generator import (
    name_to_title,
    extract_body_hint,
    generate_docstring,
    process_source,
    iter_python_files,
)


class TestNameToTitle:
    def test_snake_to_title(self):
        assert name_to_title("validate_fact") == "Validate Fact"
        assert name_to_title("docstring_generator") == "Docstring Generator"
        assert name_to_title("main") == "Main"
        assert name_to_title("__init__") == "Init"


class TestExtractBodyHint:
    def test_assignment_hint(self):
        body = [ast.parse("result = compute()").body[0]]
        hint = extract_body_hint(body)
        assert hint == "Compute or return compute()"

    def test_return_hint(self):
        body = [ast.parse("return data").body[0]]
        hint = extract_body_hint(body)
        assert hint == "Return data"

    def test_no_hint(self):
        body = [ast.parse("pass").body[0]]
        assert extract_body_hint(body) is None


class TestGenerateDocstring:
    def test_simple_function(self):
        src = "def add(a, b):\n    return a + b\n"
        tree = ast.parse(src)
        func = tree.body[0]
        doc = generate_docstring(func)
        assert 'Add' in doc
        assert 'a' in doc and 'b' in doc
        assert 'Args:' in doc
        assert 'Returns:' in doc

    def test_typed_function(self):
        src = "def greet(name: str) -> str:\n    return f'Hello {name}'\n"
        tree = ast.parse(src)
        func = tree.body[0]
        doc = generate_docstring(func)
        assert 'name (str)' in doc
        assert 'str' in doc

    def test_async_function(self):
        src = "async def fetch():\n    pass\n"
        tree = ast.parse(src)
        func = tree.body[0]
        doc = generate_docstring(func)
        assert 'Fetch' in doc

    def test_self_skipped(self):
        src = "class C:\n    def method(self, x):\n        return x\n"
        tree = ast.parse(src)
        cls = tree.body[0]
        method = cls.body[0]
        doc = generate_docstring(method)
        # 'self' should not appear in Args section
        args_start = doc.find('Args:')
        if args_start >= 0:
            args_section = doc[args_start:]
            assert '(self)' not in args_section


class TestProcessSource:
    def test_adds_docstrings(self):
        src = "def foo(x):\n    return x * 2\n"
        new_src, funcs = process_source(src, "test.py")
        assert len(funcs) == 1 and funcs[0] == "foo"
        assert '"""' in new_src
        assert 'Foo' in new_src

    def test_preserves_existing_docstrings(self):
        src = 'def bar():\n    """Already documented."""\n    return 1\n'
        new_src, funcs = process_source(src, "test.py")
        assert len(funcs) == 0
        assert new_src == src

    def test_multiple_functions(self):
        src = "def a(): pass\ndef b(): pass\ndef c(): pass\n"
        new_src, funcs = process_source(src, "test.py")
        assert len(funcs) == 3
        assert '"""' in new_src

    def test_dry_run_no_write(self, tmp_path):
        file = tmp_path / "t.py"
        file.write_text("def f(): pass\n")
        original_mtime = file.stat().st_mtime
        new_src, funcs = process_source(file.read_text(), str(file))
        assert funcs  # detected
        # When caller handles write, dry-run leaves file unchanged
        current_mtime = file.stat().st_mtime
        assert current_mtime == original_mtime


class TestIterPythonFiles:
    def test_single_file(self, tmp_path):
        f = tmp_path / "single.py"
        f.write_text("x = 1")
        files = iter_python_files([str(f)])
        assert len(files) == 1
        assert files[0].name == "single.py"

    def test_directory_recursion(self, tmp_path):
        (tmp_path / "sub").mkdir()
        (tmp_path / "sub" / "a.py").write_text("a=1")
        (tmp_path / "b.py").write_text("b=2")
        files = iter_python_files([str(tmp_path)])
        assert len(files) == 2