Compare commits

..

1 Commits

Author SHA1 Message Date
20bc0aa41a feat(scripts): add GOFAI temporal reasoning engine
Some checks failed
Architecture Lint / Linter Tests (pull_request) Successful in 8s
PR Checklist / pr-checklist (pull_request) Failing after 1m12s
Smoke Test / smoke (pull_request) Failing after 7s
Validate Config / YAML Lint (pull_request) Failing after 6s
Validate Config / JSON Validate (pull_request) Successful in 5s
Validate Config / Python Syntax & Import Check (pull_request) Failing after 7s
Validate Config / Shell Script Lint (pull_request) Successful in 14s
Validate Config / Cron Syntax Check (pull_request) Successful in 4s
Validate Config / Deploy Script Dry Run (pull_request) Successful in 4s
Validate Config / Playbook Schema Validation (pull_request) Successful in 7s
Architecture Lint / Lint Repository (pull_request) Failing after 6s
2026-04-11 01:40:24 +00:00
2 changed files with 307 additions and 341 deletions

View File

@@ -1,341 +0,0 @@
#!/usr/bin/env python3
"""knowledge_base.py - GOFAI symbolic knowledge base for the Timmy Foundation fleet.
A classical AI knowledge representation system: stores facts as ground atoms,
supports first-order-logic-style queries, and maintains a provenance chain so
every belief can be traced back to its source. No neural nets, no embeddings -
just structured symbolic reasoning over a typed fact store.
Usage:
kb = KnowledgeBase()
kb.assert_fact('agent', 'online', 'timmy')
kb.assert_fact('task', 'assigned_to', 'task-42', 'timmy')
results = kb.query('task', 'assigned_to', '?x', 'timmy')
# results -> [{'?x': 'task-42'}]
CLI:
python knowledge_base.py --assert "agent online hermes"
python knowledge_base.py --query "agent online ?who"
python knowledge_base.py --dump
"""
from __future__ import annotations
import argparse
import json
import os
import sys
import time
from dataclasses import dataclass, field
from pathlib import Path
from typing import Dict, Iterator, List, Optional, Tuple
# ---------------------------------------------------------------------------
# Data model
# ---------------------------------------------------------------------------
VAR_PREFIX = "?"


def is_var(term: str) -> bool:
    """Check whether *term* denotes a logic variable.

    Variables are terms whose first character is '?', e.g. '?who'.
    An empty string is not a variable.
    """
    return term[:1] == VAR_PREFIX
@dataclass(frozen=True)
class Fact:
    """An immutable ground atom: (relation, *args)."""
    # Relation name, e.g. 'agent' or 'assigned_to'.
    relation: str
    # Ground argument terms; a tuple so the (frozen) fact stays hashable.
    args: Tuple[str, ...]
    # Provenance tag identifying who asserted the fact.
    source: str = "user"
    # Wall-clock time at which the fact was created.
    timestamp: float = field(default_factory=time.time)

    def __str__(self) -> str:
        return "({} {})".format(self.relation, " ".join(self.args))
Bindings = Dict[str, str]
# ---------------------------------------------------------------------------
# Unification
# ---------------------------------------------------------------------------
def unify_term(pattern: str, value: str, bindings: Bindings) -> Optional[Bindings]:
    """Unify a single pattern term against a ground value.

    Returns updated bindings on success, or None on failure.
    """
    # Ground term: succeeds only on exact equality, bindings unchanged.
    if not pattern.startswith("?"):  # '?' marks a logic variable (VAR_PREFIX)
        return bindings if pattern == value else None
    # Variable already bound: must agree with the prior binding.
    if pattern in bindings:
        return bindings if bindings[pattern] == value else None
    # Fresh variable: extend a copy so the caller's dict is never mutated.
    extended = dict(bindings)
    extended[pattern] = value
    return extended
def unify_fact(
    pattern: Tuple[str, ...], fact_args: Tuple[str, ...], bindings: Bindings
) -> Optional[Bindings]:
    """Unify a full argument tuple, returning final bindings or None."""
    # Arity mismatch can never unify.
    if len(pattern) != len(fact_args):
        return None
    current: Optional[Bindings] = bindings
    # Thread the bindings through each pairwise unification, failing fast.
    for pat_term, ground_term in zip(pattern, fact_args):
        current = unify_term(pat_term, ground_term, current)
        if current is None:
            return None
    return current
# ---------------------------------------------------------------------------
# Knowledge Base
# ---------------------------------------------------------------------------
class KnowledgeBase:
    """In-memory symbolic knowledge base with optional JSON persistence.

    Facts are ground atoms (relation, *args) carrying provenance metadata.
    A (relation, args) -> Fact index gives O(1) duplicate detection, so
    bulk assertion is linear instead of the quadratic scan-per-assert.

    Args:
        persist_path: Optional JSON file.  Loaded on construction when it
            exists, and rewritten after every successful mutation.
    """

    def __init__(self, persist_path: Optional[Path] = None) -> None:
        self._facts: List[Fact] = []
        # Maps (relation, args) to the FIRST Fact stored under that key;
        # used for O(1) idempotency checks in assert_fact().
        self._index: Dict[Tuple[str, Tuple[str, ...]], Fact] = {}
        self._persist_path = persist_path
        if persist_path and persist_path.exists():
            self._load(persist_path)

    # ------------------------------------------------------------------
    # Fact management
    # ------------------------------------------------------------------
    def assert_fact(
        self, relation: str, *args: str, source: str = "user"
    ) -> Fact:
        """Add a ground fact to the knowledge base.

        Idempotent: duplicate (relation, args) pairs are not added twice;
        the previously stored Fact is returned instead.
        """
        key = (relation, tuple(args))
        existing = self._index.get(key)
        if existing is not None:
            return existing  # already known
        f = Fact(relation=relation, args=key[1], source=source, timestamp=time.time())
        self._facts.append(f)
        self._index[key] = f
        if self._persist_path:
            self._save(self._persist_path)
        return f

    def retract_fact(self, relation: str, *args: str) -> int:
        """Remove all facts matching (relation, *args). Returns count removed."""
        key = (relation, tuple(args))
        before = len(self._facts)
        self._facts = [
            f
            for f in self._facts
            if not (f.relation == relation and f.args == key[1])
        ]
        self._index.pop(key, None)
        removed = before - len(self._facts)
        if removed and self._persist_path:
            self._save(self._persist_path)
        return removed

    # ------------------------------------------------------------------
    # Query
    # ------------------------------------------------------------------
    def query(
        self, relation: str, *pattern_args: str, source_filter: Optional[str] = None
    ) -> List[Bindings]:
        """Return all binding dictionaries satisfying the query pattern.

        Variables in *pattern_args* start with '?'. Ground terms must match
        exactly. An empty binding dict means the fact matched with no
        variables to bind.

        Args:
            relation: The relation name to match.
            *pattern_args: Mixed ground/variable argument tuple.
            source_filter: Optional provenance filter (e.g. 'scheduler').

        Returns:
            List of binding dicts, one per matching fact.
        """
        pattern = tuple(pattern_args)  # hoisted out of the scan loop
        results: List[Bindings] = []
        for fact in self._facts:
            if fact.relation != relation:
                continue
            if source_filter and fact.source != source_filter:
                continue
            b = unify_fact(pattern, fact.args, {})
            if b is not None:
                results.append(b)
        return results

    def query_one(
        self, relation: str, *pattern_args: str
    ) -> Optional[Bindings]:
        """Return the first matching binding dict or None."""
        for b in self.query(relation, *pattern_args):
            return b
        return None

    def facts_for(self, relation: str) -> Iterator[Fact]:
        """Iterate over all facts with the given relation."""
        for f in self._facts:
            if f.relation == relation:
                yield f

    # ------------------------------------------------------------------
    # Bulk operations
    # ------------------------------------------------------------------
    def all_facts(self) -> List[Fact]:
        """Return a snapshot of all stored facts."""
        return list(self._facts)

    def fact_count(self) -> int:
        """Return the number of stored facts (duplicates from disk included)."""
        return len(self._facts)

    def clear(self) -> None:
        """Remove all facts from memory (does not touch disk)."""
        self._facts.clear()
        self._index.clear()

    # ------------------------------------------------------------------
    # Persistence
    # ------------------------------------------------------------------
    def _save(self, path: Path) -> None:
        """Serialize every fact to *path* as a JSON array of records."""
        records = [
            {
                "relation": f.relation,
                "args": list(f.args),
                "source": f.source,
                "timestamp": f.timestamp,
            }
            for f in self._facts
        ]
        path.write_text(json.dumps(records, indent=2))

    def _load(self, path: Path) -> None:
        """Load facts from *path*; unreadable/malformed files warn and skip.

        Records already loaded before an error occurred are kept, matching
        the original best-effort behavior.
        """
        try:
            records = json.loads(path.read_text())
            for r in records:
                f = Fact(
                    relation=r["relation"],
                    args=tuple(r["args"]),
                    source=r.get("source", "persisted"),
                    timestamp=r.get("timestamp", 0.0),
                )
                self._facts.append(f)
                # setdefault: first occurrence wins, so duplicate records on
                # disk keep the same assert_fact() semantics as in memory.
                self._index.setdefault((f.relation, f.args), f)
        # OSError covers unreadable files; TypeError covers records that are
        # not dicts or args that are not list-like.
        except (OSError, json.JSONDecodeError, KeyError, TypeError) as exc:
            print(f"[kb] Warning: could not load {path}: {exc}", file=sys.stderr)

    def save_to(self, path: Path) -> None:
        """Explicitly save to a given path."""
        self._save(path)

    # ------------------------------------------------------------------
    # Debug / display
    # ------------------------------------------------------------------
    def dump(self, relation_filter: Optional[str] = None) -> None:
        """Print all (or filtered) facts to stdout."""
        for f in self._facts:
            if relation_filter and f.relation != relation_filter:
                continue
            print(f" {f} [source={f.source}]")
# ---------------------------------------------------------------------------
# CLI
# ---------------------------------------------------------------------------
def _parse_terms(raw: str) -> List[str]:
"""Split a whitespace-separated string into terms."""
return raw.strip().split()
def main() -> None:
    """CLI entry point: parse flags, then run assert/retract/query/dump."""
    parser = argparse.ArgumentParser(
        description="GOFAI symbolic knowledge base CLI"
    )
    parser.add_argument(
        "--db",
        default="kb.json",
        help="Path to persistent JSON store (default: kb.json)",
    )
    parser.add_argument(
        "--assert",
        dest="assert_stmt",
        metavar="RELATION ARG...",
        help='Assert a fact, e.g. --assert "agent online timmy"',
    )
    parser.add_argument(
        "--retract",
        dest="retract_stmt",
        metavar="RELATION ARG...",
        help='Retract a fact, e.g. --retract "agent online timmy"',
    )
    parser.add_argument(
        "--query",
        dest="query_stmt",
        metavar="RELATION ARG...",
        help='Query the KB, e.g. --query "agent online ?who"',
    )
    parser.add_argument(
        "--dump",
        action="store_true",
        help="Dump all facts",
    )
    parser.add_argument(
        "--relation",
        help="Filter --dump to a specific relation",
    )
    args = parser.parse_args()

    kb = KnowledgeBase(persist_path=Path(args.db))

    def terms_or_die(raw: str, flag: str) -> List[str]:
        # Shared validation: every statement needs RELATION plus at least one ARG.
        terms = _parse_terms(raw)
        if len(terms) < 2:
            print(f"ERROR: {flag} requires at least RELATION and one ARG", file=sys.stderr)
            sys.exit(1)
        return terms

    if args.assert_stmt:
        terms = terms_or_die(args.assert_stmt, "--assert")
        fact = kb.assert_fact(terms[0], *terms[1:], source="cli")
        print(f"Asserted: {fact}")
    if args.retract_stmt:
        terms = terms_or_die(args.retract_stmt, "--retract")
        n = kb.retract_fact(terms[0], *terms[1:])
        print(f"Retracted {n} fact(s).")
    if args.query_stmt:
        terms = terms_or_die(args.query_stmt, "--query")
        results = kb.query(terms[0], *terms[1:])
        if not results:
            print("No results.")
        else:
            for i, b in enumerate(results, 1):
                if b:
                    print(" [{}] {}".format(i, ", ".join(f"{k}={v}" for k, v in b.items())))
                else:
                    print(f" [{i}] (ground match)")
    if args.dump:
        count = kb.fact_count()
        print(f"Knowledge Base — {count} fact(s):")
        kb.dump(relation_filter=args.relation)
    # No flags at all: show usage instead of silently doing nothing.
    if not any([args.assert_stmt, args.retract_stmt, args.query_stmt, args.dump]):
        parser.print_help()
# Script entry point, e.g. `python knowledge_base.py --query "agent online ?who"`.
if __name__ == "__main__":
    main()

View File

@@ -0,0 +1,307 @@
#!/usr/bin/env python3
"""temporal_reasoner.py - GOFAI temporal reasoning engine for the Timmy Foundation fleet.
A symbolic temporal constraint network (TCN) for scheduling and ordering events.
Models Allen's interval algebra relations (before, after, meets, overlaps, etc.)
and propagates temporal constraints via path-consistency to detect conflicts.
No ML, no embeddings - just constraint propagation over a temporal graph.
Core concepts:
TimePoint: A named instant on a symbolic timeline.
Interval: A pair of time-points (start, end) with start < end.
Constraint: A relation between two time-points or intervals
(e.g. A.before(B), A.meets(B)).
Usage (Python API):
from temporal_reasoner import TemporalNetwork, Interval
tn = TemporalNetwork()
deploy = tn.add_interval('deploy', duration=(10, 30))
test = tn.add_interval('test', duration=(5, 15))
tn.add_constraint(deploy, 'before', test)
consistent = tn.propagate()
CLI:
python temporal_reasoner.py --demo
"""
from __future__ import annotations
import argparse
import sys
from dataclasses import dataclass, field
from enum import Enum
from typing import Dict, List, Optional, Set, Tuple
INF = float('inf')
# ---------------------------------------------------------------------------
# Data model
# ---------------------------------------------------------------------------
@dataclass(frozen=True)
class TimePoint:
    """A named instant on the timeline."""
    # Human-readable label for this instant.
    name: str
    # Numeric handle; defaults to 0 when not assigned.
    id: int = 0

    def __str__(self) -> str:
        return self.name
@dataclass
class Interval:
    """A named interval bounded by two time-points."""
    name: str
    start: int  # index of the start point in the distance matrix
    end: int    # index of the end point in the distance matrix

    def __str__(self) -> str:
        return self.name
class Relation(Enum):
    """Allen's interval algebra relations (simplified subset).

    NOTE(review): within this file TemporalNetwork.add_constraint accepts
    relation names as plain strings and never references this enum; it
    also handles only before/after/meets/met_by/equals, not OVERLAPS or
    DURING. Confirm whether external callers use Relation before removal.
    """
    BEFORE = 'before'
    AFTER = 'after'
    MEETS = 'meets'
    MET_BY = 'met_by'
    OVERLAPS = 'overlaps'  # declared but not handled by add_constraint
    DURING = 'during'      # declared but not handled by add_constraint
    EQUALS = 'equals'
# ---------------------------------------------------------------------------
# Simple Temporal Network (STN) via distance matrix
# ---------------------------------------------------------------------------
class TemporalNetwork:
    """Simple Temporal Network with Floyd-Warshall propagation.

    Internally maintains a distance matrix D where D[i][j] is the
    maximum allowed value of t_j - t_i.  Constraints only ever tighten
    entries (via min).  After propagation, a negative entry on the
    diagonal indicates a negative cycle, i.e. an inconsistent network.

    Index 0 is a reserved 'origin' point anchoring absolute times;
    earliest()/latest() are measured relative to it.
    """

    def __init__(self) -> None:
        self._n = 0
        self._names: List[str] = []
        self._dist: List[List[float]] = []
        self._intervals: Dict[str, Interval] = {}
        self._origin_idx: int = -1
        # Create the global origin as point 0.
        self._add_point('origin')
        self._origin_idx = 0

    # ------------------------------------------------------------------
    # Point management
    # ------------------------------------------------------------------
    def _add_point(self, name: str) -> int:
        """Add a time-point and return its index into the distance matrix."""
        idx = self._n
        self._n += 1
        self._names.append(name)
        # Grow the matrix by one row and one column, unconstrained (INF)
        # except for the zero self-distance.
        for row in self._dist:
            row.append(INF)
        self._dist.append([INF] * self._n)
        self._dist[idx][idx] = 0.0
        return idx

    # ------------------------------------------------------------------
    # Interval management
    # ------------------------------------------------------------------
    def add_interval(
        self,
        name: str,
        duration: Optional[Tuple[float, float]] = None,
    ) -> Interval:
        """Add a named interval with optional duration bounds [lo, hi].

        With no duration the interval must last at least 1 time unit
        (start < end).  With duration=(lo, hi) the length is constrained
        to lo <= end - start <= hi; note lo == 0 permits a zero-length
        interval.

        Returns:
            The Interval object with start/end point indices.
        """
        s = self._add_point(f"{name}.start")
        e = self._add_point(f"{name}.end")
        hi = duration[1] if duration else INF
        lo = duration[0] if duration else 1
        self._dist[s][e] = min(self._dist[s][e], hi)   # end - start <= hi
        self._dist[e][s] = min(self._dist[e][s], -lo)  # end - start >= lo
        interval = Interval(name=name, start=s, end=e)
        self._intervals[name] = interval
        return interval

    # ------------------------------------------------------------------
    # Constraint management
    # ------------------------------------------------------------------
    def add_distance_constraint(
        self, i: int, j: int, lo: float, hi: float
    ) -> None:
        """Add constraint: lo <= t_j - t_i <= hi.

        Encoded in the distance matrix as D[i][j] <= hi and D[j][i] <= -lo;
        existing tighter bounds are kept (min).
        """
        self._dist[i][j] = min(self._dist[i][j], hi)
        self._dist[j][i] = min(self._dist[j][i], -lo)

    def add_constraint(
        self, a: Interval, relation: str, b: Interval, gap: Tuple[float, float] = (0, INF)
    ) -> None:
        """Add an Allen-style relation between two intervals.

        Supported relations: before, after, meets, met_by, equals.

        Args:
            a: First interval.
            relation: Relation name (case-insensitive).
            b: Second interval.
            gap: (min, max) separation for 'before'/'after'.

        Raises:
            ValueError: If *relation* is not one of the supported names.
        """
        rel = relation.lower()
        if rel == 'before':
            # gap[0] <= b.start - a.end <= gap[1]
            self.add_distance_constraint(a.end, b.start, gap[0], gap[1])
        elif rel == 'after':
            # Symmetric: b ends before a starts.
            self.add_distance_constraint(b.end, a.start, gap[0], gap[1])
        elif rel == 'meets':
            # a.end == b.start
            self.add_distance_constraint(a.end, b.start, 0, 0)
        elif rel == 'met_by':
            self.add_distance_constraint(b.end, a.start, 0, 0)
        elif rel == 'equals':
            self.add_distance_constraint(a.start, b.start, 0, 0)
            self.add_distance_constraint(a.end, b.end, 0, 0)
        else:
            raise ValueError(f"Unsupported relation: {relation}")

    # ------------------------------------------------------------------
    # Propagation (Floyd-Warshall)
    # ------------------------------------------------------------------
    def propagate(self) -> bool:
        """Run Floyd-Warshall to propagate all constraints.

        Tightens the matrix in place.  Paths through an unconstrained
        edge (d[i][k] == INF) are skipped: they can never relax anything,
        and skipping also avoids INF + (-INF) producing NaN when a caller
        has installed an unbounded lower-bound edge.

        Returns:
            True if the network is consistent (no negative cycles).
        """
        n = self._n
        d = self._dist
        for k in range(n):
            row_k = d[k]
            for i in range(n):
                d_ik = d[i][k]
                if d_ik == INF:
                    continue  # no finite path i -> k; nothing to relax
                row_i = d[i]
                for j in range(n):
                    cand = d_ik + row_k[j]
                    if cand < row_i[j]:
                        row_i[j] = cand
        # A negative self-distance is a negative cycle -> inconsistent.
        return all(d[i][i] >= 0 for i in range(n))

    def is_consistent(self) -> bool:
        """Check consistency without mutating (restores the matrix after).

        propagate() tightens the matrix in place, so work on a copy:
        rows are flat lists of floats, so a row-slice copy is sufficient.
        """
        saved = [row[:] for row in self._dist]
        result = self.propagate()
        self._dist = saved
        return result

    # ------------------------------------------------------------------
    # Query
    # ------------------------------------------------------------------
    def earliest(self, point_idx: int) -> float:
        """Earliest possible time for a point (relative to origin).

        D[p][origin] bounds origin - t_p from above, so t_p >= -D[p][origin].
        Call propagate() first for tight bounds.
        """
        return -self._dist[point_idx][self._origin_idx]

    def latest(self, point_idx: int) -> float:
        """Latest possible time for a point (relative to origin)."""
        return self._dist[self._origin_idx][point_idx]

    def interval_bounds(self, interval: Interval) -> Dict[str, Tuple[float, float]]:
        """Return earliest/latest start and end for an interval."""
        return {
            'start': (self.earliest(interval.start), self.latest(interval.start)),
            'end': (self.earliest(interval.end), self.latest(interval.end)),
        }

    # ------------------------------------------------------------------
    # Display
    # ------------------------------------------------------------------
    def dump(self) -> None:
        """Print the current network summary and per-interval bounds."""
        print(f"Temporal Network — {self._n} time-points, {len(self._intervals)} intervals")
        print()
        for name, interval in self._intervals.items():
            bounds = self.interval_bounds(interval)
            s_lo, s_hi = bounds['start']
            e_lo, e_hi = bounds['end']
            print(f" {name}:")
            print(f" start: [{s_lo:.1f}, {s_hi:.1f}]")
            print(f" end: [{e_lo:.1f}, {e_hi:.1f}]")
# ---------------------------------------------------------------------------
# Demo: Timmy fleet deployment pipeline
# ---------------------------------------------------------------------------
def run_demo() -> None:
    """Run a demo temporal reasoning scenario for the Timmy fleet."""
    banner = "=" * 60
    print(banner)
    print("Temporal Reasoner Demo - Fleet Deployment Pipeline")
    print(banner)
    print()
    tn = TemporalNetwork()
    # Pipeline stages with [min, max] duration bounds (insertion order
    # matters only for dump() display, preserved by the dict).
    durations = {
        'build': (5, 15),
        'test': (10, 30),
        'review': (2, 10),
        'deploy': (1, 5),
        'monitor': (20, 60),
    }
    iv = {name: tn.add_interval(name, duration=d) for name, d in durations.items()}
    # Ordering constraints between the stages.
    tn.add_constraint(iv['build'], 'meets', iv['test'])                # test starts when build ends
    tn.add_constraint(iv['test'], 'before', iv['review'], gap=(0, 5))  # review within 5 of test
    tn.add_constraint(iv['review'], 'meets', iv['deploy'])             # deploy immediately after review
    tn.add_constraint(iv['deploy'], 'before', iv['monitor'], gap=(0, 2))  # monitor within 2 of deploy
    # Global deadline: everything done within 120 time units.
    tn.add_distance_constraint(tn._origin_idx, iv['monitor'].end, 0, 120)
    # Build must start within the first 10 units.
    tn.add_distance_constraint(tn._origin_idx, iv['build'].start, 0, 10)
    print("Constraints added. Propagating...")
    consistent = tn.propagate()
    print(f"Network consistent: {consistent}")
    print()
    if consistent:
        tn.dump()
        print()
    # Inject a contradictory ordering to demonstrate conflict detection.
    print("--- Adding conflicting constraint: monitor.before(build) ---")
    tn.add_constraint(iv['monitor'], 'before', iv['build'])
    consistent2 = tn.propagate()
    print(f"Network consistent after conflict: {consistent2}")
# ---------------------------------------------------------------------------
# CLI
# ---------------------------------------------------------------------------
def main() -> None:
    """CLI entry point for the temporal reasoner."""
    parser = argparse.ArgumentParser(
        description="GOFAI temporal reasoning engine"
    )
    parser.add_argument(
        "--demo",
        action="store_true",
        help="Run the fleet deployment pipeline demo",
    )
    opts = parser.parse_args()
    # Run the demo when asked for explicitly, or when no flag was given
    # at all (there is nothing else this CLI can do yet).
    wants_demo = opts.demo or not any(vars(opts).values())
    if wants_demo:
        run_demo()
    else:
        parser.print_help()
# Script entry point, e.g. `python temporal_reasoner.py --demo`.
if __name__ == "__main__":
    main()