Compare commits
1 Commits
fix/883 ... burn/1480-

| Author | SHA1 | Date |
|---|---|---|
|  | 5d878179d1 |  |
@@ -1,34 +0,0 @@
{
  "roles": {
    "lead": ["publish", "checkpoint", "handoff", "read", "audit", "configure_isolation"],
    "write": ["publish", "checkpoint", "handoff", "read"],
    "read": ["read"],
    "audit": ["read", "audit"]
  },
  "isolation_profiles": [
    {
      "name": "level1-directory",
      "label": "Level 1 — directory workspace",
      "level": 1,
      "mechanism": "directory_workspace",
      "description": "Single mission cell in an isolated workspace directory.",
      "supports_resume": true
    },
    {
      "name": "level2-mount-namespace",
      "label": "Level 2 — mount namespace",
      "level": 2,
      "mechanism": "mount_namespace",
      "description": "Mount-namespace isolation with explicit mission-cell mounts.",
      "supports_resume": true
    },
    {
      "name": "level3-rootless-podman",
      "label": "Level 3 — rootless Podman",
      "level": 3,
      "mechanism": "rootless_podman",
      "description": "Rootless Podman cell for the strongest process and filesystem containment.",
      "supports_resume": true
    }
  ]
}
72 docs/duplicate-pr-prevention.md Normal file
@@ -0,0 +1,72 @@
# Duplicate PR Prevention

## Problem

The burn loop creates duplicate PRs for the same issue because it doesn't check for existing PRs before creating new ones.

## Solution

Two scripts:

### 1. Preflight Check (`scripts/preflight-pr-check.sh`)

Run BEFORE creating a PR:

```bash
./scripts/preflight-pr-check.sh 1128
```

Output if PRs exist:

```
🚫 BLOCKED: 2 existing PR(s) for issue #1128

Existing PRs:
  #1458: feat: Close duplicate PRs for issue #1128
      Branch: dawn/1128-1776130053
      URL: https://...

Options:
  1. Review and merge an existing PR
  2. Close duplicates and proceed
  3. Use --force to bypass (NOT RECOMMENDED)
```

Exit code 1 = blocked. Exit code 0 = safe to proceed.
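
A caller can branch on those codes directly; a minimal sketch (the script body also reserves exit code 2 for its own errors, such as a missing token):

```bash
# Minimal sketch: branch on the documented exit codes.
rc=0
./scripts/preflight-pr-check.sh "$ISSUE_NUM" || rc=$?
case "$rc" in
  0) echo "No duplicates, safe to create the PR." ;;
  1) echo "Blocked: an open PR already references this issue." ;;
  *) echo "Preflight check itself failed (exit $rc), e.g. missing GITEA_TOKEN." ;;
esac
```
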
### 2. Cleanup Script (`scripts/cleanup-duplicate-prs.sh`)

Close duplicate PRs:

```bash
# Dry run (show what would be closed)
./scripts/cleanup-duplicate-prs.sh 1128

# Actually close duplicates (keeps oldest)
./scripts/cleanup-duplicate-prs.sh 1128 --close
```

## Integration

### In burn loop

Add preflight check before PR creation:

```bash
# Before: git push && curl ... /pulls
./scripts/preflight-pr-check.sh $ISSUE_NUM || exit 1
```
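
A slightly fuller sketch of that ordering, assuming the loop pushes its branch and then opens the PR through the Gitea API; the branch name, base branch, and PR title below are placeholders, not the loop's actual values:

```bash
# Hypothetical burn-loop excerpt: the preflight check gates PR creation.
BRANCH="dawn/${ISSUE_NUM}-example"
git push origin "$BRANCH"
./scripts/preflight-pr-check.sh "$ISSUE_NUM" || exit 1
curl -sf -X POST \
  -H "Authorization: token ${GITEA_TOKEN}" \
  -H "Content-Type: application/json" \
  -d "{\"title\": \"fix: issue #${ISSUE_NUM}\", \"head\": \"${BRANCH}\", \"base\": \"main\"}" \
  "${GITEA_URL}/api/v1/repos/${GITEA_REPO}/pulls"
```
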
### In CI

Add as a GitHub/Gitea Actions check:

```yaml
- name: Check for duplicate PRs
  run: ./scripts/preflight-pr-check.sh ${{ github.event.issue.number }}
```

## Environment Variables

- `GITEA_TOKEN` — API token (default: reads from `~/.config/gitea/token`)
- `GITEA_URL` — Forge URL (default: `https://forge.alexanderwhitestone.com`)
- `GITEA_REPO` — Repository (default: `Timmy_Foundation/the-nexus`)
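
The defaults above can be overridden per invocation; a minimal sketch, where the forge URL and repository are placeholders rather than real defaults:

```bash
# Hypothetical one-off run against a different forge and repository.
GITEA_URL="https://forge.example.com" \
GITEA_REPO="example-org/example-repo" \
./scripts/preflight-pr-check.sh 1128
```
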
@@ -1,31 +0,0 @@
# Mission Bus

The Mission Bus grounds the multi-agent teaming epic with a concrete, executable shared module.

## What it adds
- one unified mission stream for messages, checkpoints, and handoffs
- role-based permissions for `lead`, `write`, `read`, and `audit`
- cross-agent handoff packets so Agent A can checkpoint and Agent B can resume
- declared isolation profiles for Level 1, Level 2, and Level 3 mission cells

## Files
- `nexus/mission_bus.py`
- `config/mission_bus_profiles.json`

## Example

```python
from nexus.mission_bus import MissionBus, MissionRole, load_profiles
from pathlib import Path

bus = MissionBus("mission-883", title="multi-agent teaming", config=load_profiles(Path("config/mission_bus_profiles.json")))
bus.register_participant("timmy", MissionRole.LEAD)
bus.register_participant("ezra", MissionRole.WRITE)
checkpoint = bus.create_checkpoint("ezra", summary="checkpoint", state={"branch": "fix/883"})
bus.handoff("ezra", "timmy", checkpoint.checkpoint_id, note="resume from here")
packet = bus.build_resume_packet(bus.events[-1].handoff_id)
```

## Scope of this slice
This slice does not yet wire a live transport or rootless container launcher.
It codifies the mission bus contract, role permissions, handoff packet, and isolation profile surface so later work can execute against a stable interface.
@@ -14,16 +14,6 @@ from nexus.perception_adapter import (
)
from nexus.experience_store import ExperienceStore
from nexus.trajectory_logger import TrajectoryLogger
from nexus.mission_bus import (
    MissionBus,
    MissionRole,
    MissionParticipant,
    MissionMessage,
    MissionCheckpoint,
    MissionHandoff,
    IsolationProfile,
    load_profiles,
)

try:
    from nexus.nexus_think import NexusMind
@@ -38,13 +28,5 @@ __all__ = [
    "Action",
    "ExperienceStore",
    "TrajectoryLogger",
    "MissionBus",
    "MissionRole",
    "MissionParticipant",
    "MissionMessage",
    "MissionCheckpoint",
    "MissionHandoff",
    "IsolationProfile",
    "load_profiles",
    "NexusMind",
]
@@ -1,358 +0,0 @@
"""Mission bus, role permissions, cross-agent handoff, and isolation profiles.

Grounded implementation slice for #883.
The bus gives a single mission cell a unified event stream, permission-checked
roles, checkpoint + resume handoff, and declared isolation profiles for Level
1/2/3 execution boundaries.
"""

from __future__ import annotations

import json
import uuid
from dataclasses import dataclass, field
from datetime import datetime, timezone
from enum import Enum
from pathlib import Path
from typing import Any, Dict, List, Union


DEFAULT_CONFIG = {
    "roles": {
        "lead": ["publish", "checkpoint", "handoff", "read", "audit", "configure_isolation"],
        "write": ["publish", "checkpoint", "handoff", "read"],
        "read": ["read"],
        "audit": ["read", "audit"],
    },
    "isolation_profiles": [
        {
            "name": "level1-directory",
            "label": "Level 1 — directory workspace",
            "level": 1,
            "mechanism": "directory_workspace",
            "description": "Single mission cell in an isolated workspace directory.",
            "supports_resume": True,
        },
        {
            "name": "level2-mount-namespace",
            "label": "Level 2 — mount namespace",
            "level": 2,
            "mechanism": "mount_namespace",
            "description": "Mount-namespace isolation with explicit mission-cell mounts.",
            "supports_resume": True,
        },
        {
            "name": "level3-rootless-podman",
            "label": "Level 3 — rootless Podman",
            "level": 3,
            "mechanism": "rootless_podman",
            "description": "Rootless Podman cell for the strongest process and filesystem containment.",
            "supports_resume": True,
        },
    ],
}


def utcnow_iso() -> str:
    return datetime.now(timezone.utc).isoformat()


def load_profiles(path: Path) -> Dict[str, Any]:
    if not path.exists():
        return json.loads(json.dumps(DEFAULT_CONFIG))
    with open(path, "r", encoding="utf-8") as handle:
        data = json.load(handle)
    data.setdefault("roles", DEFAULT_CONFIG["roles"])
    data.setdefault("isolation_profiles", DEFAULT_CONFIG["isolation_profiles"])
    return data


class MissionRole(str, Enum):
    LEAD = "lead"
    WRITE = "write"
    READ = "read"
    AUDIT = "audit"


@dataclass
class IsolationProfile:
    name: str
    label: str
    level: int
    mechanism: str
    description: str = ""
    supports_resume: bool = True

    def to_dict(self) -> Dict[str, Any]:
        return {
            "name": self.name,
            "label": self.label,
            "level": self.level,
            "mechanism": self.mechanism,
            "description": self.description,
            "supports_resume": self.supports_resume,
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "IsolationProfile":
        return cls(
            name=data["name"],
            label=data["label"],
            level=int(data["level"]),
            mechanism=data["mechanism"],
            description=data.get("description", ""),
            supports_resume=bool(data.get("supports_resume", True)),
        )


@dataclass
class MissionParticipant:
    name: str
    role: MissionRole
    metadata: Dict[str, Any] = field(default_factory=dict)

    def to_dict(self) -> Dict[str, Any]:
        return {
            "name": self.name,
            "role": self.role.value,
            "metadata": self.metadata,
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "MissionParticipant":
        return cls(name=data["name"], role=MissionRole(data["role"]), metadata=data.get("metadata", {}))


@dataclass
class MissionMessage:
    sender: str
    topic: str
    payload: Dict[str, Any]
    sequence: int
    timestamp: str = field(default_factory=utcnow_iso)
    message_id: str = field(default_factory=lambda: str(uuid.uuid4()))
    event_type: str = field(default="message", init=False)

    def to_dict(self) -> Dict[str, Any]:
        return {
            "event_type": self.event_type,
            "sender": self.sender,
            "topic": self.topic,
            "payload": self.payload,
            "sequence": self.sequence,
            "timestamp": self.timestamp,
            "message_id": self.message_id,
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "MissionMessage":
        return cls(
            sender=data["sender"],
            topic=data["topic"],
            payload=data["payload"],
            sequence=int(data["sequence"]),
            timestamp=data.get("timestamp", utcnow_iso()),
            message_id=data.get("message_id") or data.get("messageId") or str(uuid.uuid4()),
        )


@dataclass
class MissionCheckpoint:
    sender: str
    summary: str
    state: Dict[str, Any]
    sequence: int
    artifacts: List[str] = field(default_factory=list)
    timestamp: str = field(default_factory=utcnow_iso)
    checkpoint_id: str = field(default_factory=lambda: str(uuid.uuid4()))
    event_type: str = field(default="checkpoint", init=False)

    def to_dict(self) -> Dict[str, Any]:
        return {
            "event_type": self.event_type,
            "sender": self.sender,
            "summary": self.summary,
            "state": self.state,
            "artifacts": self.artifacts,
            "sequence": self.sequence,
            "timestamp": self.timestamp,
            "checkpoint_id": self.checkpoint_id,
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "MissionCheckpoint":
        return cls(
            sender=data["sender"],
            summary=data["summary"],
            state=data.get("state", {}),
            artifacts=list(data.get("artifacts", [])),
            sequence=int(data["sequence"]),
            timestamp=data.get("timestamp", utcnow_iso()),
            checkpoint_id=data.get("checkpoint_id") or data.get("checkpointId") or str(uuid.uuid4()),
        )


@dataclass
class MissionHandoff:
    sender: str
    recipient: str
    checkpoint_id: str
    sequence: int
    note: str = ""
    timestamp: str = field(default_factory=utcnow_iso)
    handoff_id: str = field(default_factory=lambda: str(uuid.uuid4()))
    event_type: str = field(default="handoff", init=False)

    def to_dict(self) -> Dict[str, Any]:
        return {
            "event_type": self.event_type,
            "sender": self.sender,
            "recipient": self.recipient,
            "checkpoint_id": self.checkpoint_id,
            "note": self.note,
            "sequence": self.sequence,
            "timestamp": self.timestamp,
            "handoff_id": self.handoff_id,
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "MissionHandoff":
        return cls(
            sender=data["sender"],
            recipient=data["recipient"],
            checkpoint_id=data["checkpoint_id"] if "checkpoint_id" in data else data["checkpointId"],
            note=data.get("note", ""),
            sequence=int(data["sequence"]),
            timestamp=data.get("timestamp", utcnow_iso()),
            handoff_id=data.get("handoff_id") or data.get("handoffId") or str(uuid.uuid4()),
        )


MissionEvent = Union[MissionMessage, MissionCheckpoint, MissionHandoff]


def event_from_dict(data: Dict[str, Any]) -> MissionEvent:
    kind = data["event_type"]
    if kind == "message":
        return MissionMessage.from_dict(data)
    if kind == "checkpoint":
        return MissionCheckpoint.from_dict(data)
    if kind == "handoff":
        return MissionHandoff.from_dict(data)
    raise ValueError(f"Unknown mission event type: {kind}")


class MissionBus:
    def __init__(self, mission_id: str, title: str = "", config: Dict[str, Any] | None = None):
        self.mission_id = mission_id
        self.title = title
        self.config = config or json.loads(json.dumps(DEFAULT_CONFIG))
        self.role_permissions = {
            role: set(perms) for role, perms in self.config.get("roles", {}).items()
        }
        self.isolation_profiles = [
            IsolationProfile.from_dict(entry) for entry in self.config.get("isolation_profiles", [])
        ]
        self.participants: Dict[str, MissionParticipant] = {}
        self.events: List[MissionEvent] = []

    def register_participant(self, name: str, role: MissionRole, metadata: Dict[str, Any] | None = None) -> MissionParticipant:
        participant = MissionParticipant(name=name, role=role, metadata=metadata or {})
        self.participants[name] = participant
        return participant

    def allowed(self, name: str, capability: str) -> bool:
        participant = self.participants.get(name)
        if participant is None:
            return False
        return capability in self.role_permissions.get(participant.role.value, set())

    def _require(self, name: str, capability: str) -> None:
        if not self.allowed(name, capability):
            raise PermissionError(f"{name} lacks '{capability}' permission")

    def _next_sequence(self) -> int:
        return len(self.events) + 1

    def publish(self, sender: str, topic: str, payload: Dict[str, Any]) -> MissionMessage:
        self._require(sender, "publish")
        event = MissionMessage(sender=sender, topic=topic, payload=payload, sequence=self._next_sequence())
        self.events.append(event)
        return event

    def create_checkpoint(
        self,
        sender: str,
        summary: str,
        state: Dict[str, Any],
        artifacts: List[str] | None = None,
    ) -> MissionCheckpoint:
        self._require(sender, "checkpoint")
        event = MissionCheckpoint(
            sender=sender,
            summary=summary,
            state=state,
            artifacts=list(artifacts or []),
            sequence=self._next_sequence(),
        )
        self.events.append(event)
        return event

    def _get_checkpoint(self, checkpoint_id: str) -> MissionCheckpoint:
        for event in self.events:
            if isinstance(event, MissionCheckpoint) and event.checkpoint_id == checkpoint_id:
                return event
        raise KeyError(f"Unknown checkpoint: {checkpoint_id}")

    def _get_handoff(self, handoff_id: str) -> MissionHandoff:
        for event in self.events:
            if isinstance(event, MissionHandoff) and event.handoff_id == handoff_id:
                return event
        raise KeyError(f"Unknown handoff: {handoff_id}")

    def handoff(self, sender: str, recipient: str, checkpoint_id: str, note: str = "") -> MissionHandoff:
        self._require(sender, "handoff")
        if recipient not in self.participants:
            raise KeyError(f"Unknown recipient: {recipient}")
        self._get_checkpoint(checkpoint_id)
        event = MissionHandoff(
            sender=sender,
            recipient=recipient,
            checkpoint_id=checkpoint_id,
            note=note,
            sequence=self._next_sequence(),
        )
        self.events.append(event)
        return event

    def build_resume_packet(self, handoff_id: str) -> Dict[str, Any]:
        handoff = self._get_handoff(handoff_id)
        checkpoint = self._get_checkpoint(handoff.checkpoint_id)
        return {
            "mission_id": self.mission_id,
            "title": self.title,
            "recipient": handoff.recipient,
            "sender": handoff.sender,
            "handoff_note": handoff.note,
            "checkpoint": checkpoint.to_dict(),
            "participants": {name: participant.to_dict() for name, participant in self.participants.items()},
            "isolation_profiles": [profile.to_dict() for profile in self.isolation_profiles],
            "stream_length": len(self.events),
        }

    def to_dict(self) -> Dict[str, Any]:
        return {
            "mission_id": self.mission_id,
            "title": self.title,
            "config": self.config,
            "participants": {name: participant.to_dict() for name, participant in self.participants.items()},
            "events": [event.to_dict() for event in self.events],
        }

    @classmethod
    def from_dict(cls, data: Dict[str, Any]) -> "MissionBus":
        bus = cls(data["mission_id"], title=data.get("title", ""), config=data.get("config"))
        for name, participant_data in data.get("participants", {}).items():
            bus.participants[name] = MissionParticipant.from_dict(participant_data)
        bus.events = [event_from_dict(event_data) for event_data in data.get("events", [])]
        return bus
@@ -1,111 +0,0 @@
# Night Shift Prediction Report — April 12-13, 2026

## Starting State (11:36 PM)

```
Time: 11:36 PM EDT
Automation: 13 burn loops × 3min + 1 explorer × 10min + 1 backlog × 30min
API: Nous/xiaomi/mimo-v2-pro (FREE)
Rate: 268 calls/hour
Duration: 7.5 hours until 7 AM
Total expected API calls: ~2,010
```

## Burn Loops Active (13 @ every 3 min)

| Loop | Repo | Focus |
|------|------|-------|
| Testament Burn | the-nexus | MUD bridge + paper |
| Foundation Burn | all repos | Gitea issues |
| beacon-sprint | the-nexus | paper iterations |
| timmy-home sprint | timmy-home | 226 issues |
| Beacon sprint | the-beacon | game issues |
| timmy-config sprint | timmy-config | config issues |
| the-door burn | the-door | crisis front door |
| the-testament burn | the-testament | book |
| the-nexus burn | the-nexus | 3D world + MUD |
| fleet-ops burn | fleet-ops | sovereign fleet |
| timmy-academy burn | timmy-academy | academy |
| turboquant burn | turboquant | KV-cache compression |
| wolf burn | wolf | model evaluation |

## Expected Outcomes by 7 AM

### API Calls
- Total calls: ~2,010
- Successful completions: ~1,400 (70%)
- API errors (rate limit, timeout): ~400 (20%)
- Iteration limits hit: ~210 (10%)

### Commits
- Total commits pushed: ~800-1,200
- Average per loop: ~60-90 commits
- Unique branches created: ~300-400

### Pull Requests
- Total PRs created: ~150-250
- Average per loop: ~12-19 PRs

### Issues Filed
- New issues created (QA, explorer): ~20-40
- Issues closed by PRs: ~50-100

### Code Written
- Estimated lines added: ~50,000-100,000
- Estimated files created/modified: ~2,000-3,000

### Paper Progress
- Research paper iterations: ~150 cycles
- Expected paper word count growth: ~5,000-10,000 words
- New experiment results: 2-4 additional experiments
- BibTeX citations: 10-20 verified citations

### MUD Bridge
- Bridge file: 2,875 → ~5,000+ lines
- New game systems: 5-10 (combat tested, economy, social graph, leaderboard)
- QA cycles: 15-30 exploration sessions
- Critical bugs found: 3-5
- Critical bugs fixed: 2-3

### Repository Activity (per repo)

| Repo | Expected PRs | Expected Commits |
|------|-------------|-----------------|
| the-nexus | 30-50 | 200-300 |
| the-beacon | 20-30 | 150-200 |
| timmy-config | 15-25 | 100-150 |
| the-testament | 10-20 | 80-120 |
| the-door | 5-10 | 40-60 |
| timmy-home | 10-20 | 80-120 |
| fleet-ops | 5-10 | 40-60 |
| timmy-academy | 5-10 | 40-60 |
| turboquant | 3-5 | 20-30 |
| wolf | 3-5 | 20-30 |

### Dream Cycle
- 5 dreams generated (11:30 PM, 1 AM, 2:30 AM, 4 AM, 5:30 AM)
- 1 reflection (10 PM)
- 1 timmy-dreams (5:30 AM)
- Total dream output: ~5,000-8,000 words of creative writing

### Explorer (every 10 min)
- ~45 exploration cycles
- Bugs found: 15-25
- Issues filed: 15-25

### Risk Factors
- API rate limiting: Possible after 500+ consecutive calls
- Large file patch failures: Bridge file too large for agents
- Branch conflicts: Multiple agents on same repo
- Iteration limits: 5-iteration agents can't push
- Repository cloning: May hit timeout on slow clones

### Confidence Level
- High confidence: 800+ commits, 150+ PRs
- Medium confidence: 1,000+ commits, 200+ PRs
- Low confidence: 1,200+ commits, 250+ PRs (requires all loops running clean)

---

*This report is a prediction. The 7 AM morning report will compare actual results.*
*Generated: 2026-04-12 23:36 EDT*
*Author: Timmy (pre-shift prediction)*
@@ -1,170 +1,101 @@
#!/usr/bin/env bash
# ═══════════════════════════════════════════════════════════════
# cleanup-duplicate-prs.sh — Identify and close duplicate open PRs
#
# This script identifies PRs that are duplicates (same issue number
# or very similar titles) and closes the older ones.
# cleanup-duplicate-prs.sh — Close duplicate PRs for a given issue
#
# Usage:
#   ./scripts/cleanup-duplicate-prs.sh [--dry-run] [--close]
#   ./scripts/cleanup-duplicate-prs.sh <issue_number> [--close]
#
# Options:
#   --dry-run   Show what would be done without making changes
#   --close     Actually close duplicate PRs (default is dry-run)
#
# Designed for issue #1128: Forge Cleanup
# ═══════════════════════════════════════════════════════════════
# Without --close: dry run (show what would be closed)
# With --close: actually close the duplicates

set -euo pipefail

# ─── Configuration ──────────────────────────────────────────
ISSUE_NUM="${1:?Usage: cleanup-duplicate-prs.sh <issue_number> [--close]}"
CLOSE_MODE="${2:-}"
GITEA_URL="${GITEA_URL:-https://forge.alexanderwhitestone.com}"
GITEA_TOKEN="${GITEA_TOKEN:?Set GITEA_TOKEN env var}"
REPO="${REPO:-Timmy_Foundation/the-nexus}"
DRY_RUN="${DRY_RUN:-true}"
GITEA_TOKEN="${GITEA_TOKEN:-$(cat ~/.config/gitea/token 2>/dev/null || echo '')}"
REPO="${GITEA_REPO:-Timmy_Foundation/the-nexus}"

# Parse command line arguments
for arg in "$@"; do
  case $arg in
    --dry-run)
      DRY_RUN="true"
      ;;
    --close)
      DRY_RUN="false"
      ;;
  esac
done
if [ -z "$GITEA_TOKEN" ]; then
  echo "ERROR: GITEA_TOKEN not set"
  exit 1
fi

API="$GITEA_URL/api/v1"
AUTH="token $GITEA_TOKEN"
REPO_API="${GITEA_URL}/api/v1/repos/${REPO}"

log() { echo "[$(date -u +%Y-%m-%dT%H:%M:%SZ)] $*"; }
# Fetch open PRs
PRS=$(curl -sf -H "Authorization: token ${GITEA_TOKEN}" "${REPO_API}/pulls?state=open&limit=50" 2>/dev/null || echo '[]')

# ─── Fetch open PRs ────────────────────────────────────────
log "Checking open PRs for $REPO (dry_run: $DRY_RUN)"
# Find matching PRs
MATCHES=$(echo "$PRS" | python3 -c "
import json, sys
prs = json.load(sys.stdin)
issue = '${ISSUE_NUM}'
matches = []
for pr in prs:
    title = pr.get('title', '')
    body = pr.get('body', '')
    ref = pr.get('head', {}).get('ref', '')
    if f'#{issue}' in title or f'#{issue}' in body or issue in ref:
        matches.append(pr)
json.dump(matches, sys.stdout)
" 2>/dev/null || echo '[]')

OPEN_PRS=$(curl -s -H "$AUTH" "$API/repos/$REPO/pulls?state=open&limit=50")
COUNT=$(echo "$MATCHES" | python3 -c "import json,sys; print(len(json.load(sys.stdin)))" 2>/dev/null || echo '0')

if [ -z "$OPEN_PRS" ] || [ "$OPEN_PRS" = "null" ]; then
  log "No open PRs found or API error"
if [ "$COUNT" -eq 0 ]; then
  echo "No PRs found for issue #$ISSUE_NUM"
  exit 0
fi

# Count PRs
PR_COUNT=$(echo "$OPEN_PRS" | jq length)
log "Found $PR_COUNT open PRs"
echo "Found $COUNT PR(s) for issue #$ISSUE_NUM:"
echo "$MATCHES" | python3 -c "
import json, sys
prs = json.load(sys.stdin)
for pr in prs:
    print(f\"  #{pr['number']}: {pr['title']} [{pr['head']['ref']}]\")
"

if [ "$PR_COUNT" -eq 0 ]; then
  log "No open PRs to process"
if [ "$COUNT" -le 1 ]; then
  echo ""
  echo "Only 1 PR found. No cleanup needed."
  exit 0
fi

# ─── Extract issue numbers from PR titles ──────────────────
# Create a temporary file for PR data
TEMP_FILE=$(mktemp)
echo "$OPEN_PRS" | jq -r '.[] | "\(.number)\t\(.title)\t\(.created_at)\t\(.head.ref)"' > "$TEMP_FILE"
# Keep the oldest PR, close the rest
echo "$MATCHES" | python3 -c "
import json, sys
prs = json.load(sys.stdin)
prs.sort(key=lambda p: p['number'])
keep = prs[0]
close = prs[1:]
print(f'KEEP: #{keep[\"number\"]}: {keep[\"title\"]}')
for pr in close:
    print(f'CLOSE: #{pr[\"number\"]}: {pr[\"title\"]}')
"

# Group PRs by issue number using temporary files
TEMP_DIR=$(mktemp -d)
trap "rm -rf $TEMP_DIR" EXIT

while IFS=$'\t' read -r pr_number pr_title pr_created pr_branch; do
  # Extract issue number from title (look for #123 pattern)
  if [[ $pr_title =~ \#([0-9]+) ]]; then
    issue_num="${BASH_REMATCH[1]}"
    echo "$pr_number,$pr_created,$pr_branch" >> "$TEMP_DIR/issue_$issue_num.txt"
  fi
done < "$TEMP_FILE"

rm -f "$TEMP_FILE"

# ─── Identify and process duplicates ──────────────────────
DUPLICATES_FOUND=0
CLOSED_COUNT=0

for issue_file in "$TEMP_DIR"/issue_*.txt; do
  [ -f "$issue_file" ] || continue

  issue_num=$(basename "$issue_file" .txt | sed 's/issue_//')
  pr_list=$(cat "$issue_file")

  # Count PRs for this issue
  pr_count=$(echo -n "$pr_list" | grep -c '^' || true)

  if [ "$pr_count" -le 1 ]; then
    continue  # No duplicates
  fi

  log "Issue #$issue_num has $pr_count open PRs"
  DUPLICATES_FOUND=$((DUPLICATES_FOUND + 1))

  # Sort by creation date (oldest first)
  sorted_prs=$(echo -n "$pr_list" | sort -t',' -k2)

  # Keep the newest PR, close the rest
  newest_pr=""
  newest_date=""

  while IFS=',' read -r pr_num pr_date pr_branch; do
    if [ -z "$newest_date" ] || [[ "$pr_date" > "$newest_date" ]]; then
      newest_pr="$pr_num"
      newest_date="$pr_date"
    fi
  done <<< "$sorted_prs"

  log "Keeping PR #$newest_pr (newest)"

  # Close older PRs
  while IFS=',' read -r pr_num pr_date pr_branch; do
    if [ "$pr_num" = "$newest_pr" ]; then
      continue  # Skip the newest PR
    fi

    log "Closing duplicate PR #$pr_num for issue #$issue_num"

    if [ "$DRY_RUN" = "true" ]; then
      log "DRY RUN: Would close PR #$pr_num"
    else
      # Add a comment explaining why we're closing
      comment_body="Closing as duplicate. PR #$newest_pr is newer and addresses the same issue (#$issue_num)."

      curl -s -X POST -H "$AUTH" -H "Content-Type: application/json" -d "{\"body\": \"$comment_body\"}" "$API/repos/$REPO/issues/$pr_num/comments" > /dev/null

      # Close the PR
      curl -s -X PATCH -H "$AUTH" -H "Content-Type: application/json" -d '{"state": "closed"}' "$API/repos/$REPO/pulls/$pr_num" > /dev/null

      log "Closed PR #$pr_num"
      CLOSED_COUNT=$((CLOSED_COUNT + 1))
    fi
  done <<< "$sorted_prs"
done

# ─── Summary ──────────────────────────────────────────────
log "Cleanup complete:"
log "  Duplicate issue groups found: $DUPLICATES_FOUND"
log "  PRs closed: $CLOSED_COUNT"
log "  Dry run: $DRY_RUN"

if [ "$DUPLICATES_FOUND" -eq 0 ]; then
  log "No duplicate PRs found"
if [ "$CLOSE_MODE" != "--close" ]; then
  echo ""
  echo "DRY RUN: Add --close to actually close duplicates"
  exit 0
fi

# ─── Additional cleanup: Stale PRs ────────────────────────
# Check for PRs older than 30 days with no activity
log "Checking for stale PRs (older than 30 days)..."
# Close duplicates
echo "$MATCHES" | python3 -c "
import json, sys, urllib.request, os
prs = json.load(sys.stdin)
prs.sort(key=lambda p: p['number'])
token = '${GITEA_TOKEN}'
api = '${REPO_API}'
for pr in prs[1:]:
    url = f'{api}/pulls/{pr[\"number\"]}'
    data = json.dumps({'state': 'closed'}).encode()
    req = urllib.request.Request(url, data=data, headers={'Authorization': f'token {token}', 'Content-Type': 'application/json'}, method='PATCH')
    try:
        urllib.request.urlopen(req)
        print(f'Closed PR #{pr[\"number\"]}')
    except Exception as e:
        print(f'Error closing #{pr[\"number\"]}: {e}')
"

THIRTY_DAYS_AGO=$(date -u -v-30d +%Y-%m-%dT%H:%M:%SZ 2>/dev/null || date -u -d "30 days ago" +%Y-%m-%dT%H:%M:%SZ)

STALE_PRS=$(echo "$OPEN_PRS" | jq -r --arg cutoff "$THIRTY_DAYS_AGO" '.[] | select(.created_at < $cutoff) | "\(.number)\t\(.title)\t\(.created_at)"')

if [ -n "$STALE_PRS" ]; then
  STALE_COUNT=$(echo -n "$STALE_PRS" | grep -c '^' || true)
  log "Found $STALE_COUNT stale PRs (older than 30 days)"

  echo "$STALE_PRS" | while IFS=$'\t' read -r pr_num pr_title pr_created; do
    log "Stale PR #$pr_num: $pr_title (created: $pr_created)"
  done
else
  log "No stale PRs found"
fi

log "Script complete"
echo ""
echo "Cleanup complete."
82 scripts/preflight-pr-check.sh Executable file
@@ -0,0 +1,82 @@
#!/usr/bin/env bash
# preflight-pr-check.sh — Prevent duplicate PRs before creating them
#
# Usage:
#   ./scripts/preflight-pr-check.sh <issue_number>
#
# Exit codes:
#   0 = safe to proceed (no existing PRs)
#   1 = BLOCKED (existing PRs found)
#   2 = error

set -euo pipefail

ISSUE_NUM="${1:?Usage: preflight-pr-check.sh <issue_number>}"
GITEA_URL="${GITEA_URL:-https://forge.alexanderwhitestone.com}"
GITEA_TOKEN="${GITEA_TOKEN:-$(cat ~/.config/gitea/token 2>/dev/null || echo '')}"
REPO="${GITEA_REPO:-Timmy_Foundation/the-nexus}"

if [ -z "$GITEA_TOKEN" ]; then
  echo "ERROR: GITEA_TOKEN not set and ~/.config/gitea/token not found"
  exit 2
fi

# Get repo info
REPO_API="${GITEA_URL}/api/v1/repos/${REPO}"

# Fetch open PRs
PRS=$(curl -sf -H "Authorization: token ${GITEA_TOKEN}" "${REPO_API}/pulls?state=open&limit=50" 2>/dev/null || echo '[]')

# Check for existing PRs referencing this issue
MATCHING_PRS=$(echo "$PRS" | python3 -c "
import json, sys
prs = json.load(sys.stdin)
issue = '${ISSUE_NUM}'
matches = []
for pr in prs:
    title = pr.get('title', '')
    body = pr.get('body', '')
    ref = pr.get('head', {}).get('ref', '')
    if f'#{issue}' in title or f'#{issue}' in body or issue in ref:
        matches.append({
            'number': pr['number'],
            'title': title,
            'branch': ref,
            'url': pr.get('html_url', '')
        })
json.dump(matches, sys.stdout)
" 2>/dev/null || echo '[]')

COUNT=$(echo "$MATCHING_PRS" | python3 -c "import json,sys; print(len(json.load(sys.stdin)))" 2>/dev/null || echo '0')

if [ "$COUNT" -gt 0 ]; then
  echo "╔══════════════════════════════════════════════════════════════╗"
  echo "║  🚫 BLOCKED: $COUNT existing PR(s) for issue #$ISSUE_NUM"
  echo "╚══════════════════════════════════════════════════════════════╝"
  echo ""
  echo "Existing PRs:"
  echo "$MATCHING_PRS" | python3 -c "
import json, sys
prs = json.load(sys.stdin)
for pr in prs:
    print(f\"  #{pr['number']}: {pr['title']}\")
    print(f\"      Branch: {pr['branch']}\")
    print(f\"      URL: {pr['url']}\")
    print()
"
  echo "Options:"
  echo "  1. Review and merge an existing PR"
  echo "  2. Close duplicates and proceed"
  echo "  3. Use --force to bypass (NOT RECOMMENDED)"
  echo ""

  if [ "${2:-}" = "--force" ]; then
    echo "⚠️  --force flag detected. Bypassing duplicate check."
    exit 0
  fi

  exit 1
else
  echo "✅ Safe to proceed: No existing PRs for issue #$ISSUE_NUM"
  exit 0
fi
@@ -1,107 +0,0 @@
from importlib import util
from pathlib import Path
import sys

import pytest


ROOT = Path(__file__).resolve().parent.parent
MODULE_PATH = ROOT / "nexus" / "mission_bus.py"
CONFIG_PATH = ROOT / "config" / "mission_bus_profiles.json"


def load_module():
    spec = util.spec_from_file_location("mission_bus", MODULE_PATH)
    module = util.module_from_spec(spec)
    assert spec.loader is not None
    sys.modules[spec.name] = module
    spec.loader.exec_module(module)
    return module


def build_bus(module):
    profiles = module.load_profiles(CONFIG_PATH)
    bus = module.MissionBus("mission-883", title="multi-agent teaming", config=profiles)
    bus.register_participant("timmy", module.MissionRole.LEAD)
    bus.register_participant("ezra", module.MissionRole.WRITE)
    bus.register_participant("bezalel", module.MissionRole.READ)
    bus.register_participant("allegro", module.MissionRole.AUDIT)
    return bus


def test_role_permissions_gate_publish_checkpoint_and_handoff():
    module = load_module()
    bus = build_bus(module)

    assert bus.allowed("timmy", "publish") is True
    assert bus.allowed("ezra", "handoff") is True
    assert bus.allowed("allegro", "audit") is True
    assert bus.allowed("bezalel", "publish") is False

    with pytest.raises(PermissionError):
        bus.publish("bezalel", "mission.notes", {"text": "should fail"})

    with pytest.raises(PermissionError):
        bus.create_checkpoint("allegro", summary="audit cannot checkpoint", state={})


def test_mission_bus_unified_stream_records_messages_checkpoints_and_handoffs():
    module = load_module()
    bus = build_bus(module)

    msg = bus.publish("timmy", "mission.start", {"goal": "build the slice"})
    checkpoint = bus.create_checkpoint(
        "ezra",
        summary="checkpoint before lead review",
        state={"branch": "fix/883", "files": ["nexus/mission_bus.py"]},
        artifacts=["docs/mission-bus.md"],
    )
    handoff = bus.handoff("ezra", "timmy", checkpoint.checkpoint_id, note="ready for lead review")

    assert [event.event_type for event in bus.events] == ["message", "checkpoint", "handoff"]
    assert [event.sequence for event in bus.events] == [1, 2, 3]
    assert msg.topic == "mission.start"
    assert handoff.recipient == "timmy"


def test_handoff_resume_packet_contains_checkpoint_state_and_participants():
    module = load_module()
    bus = build_bus(module)
    checkpoint = bus.create_checkpoint(
        "ezra",
        summary="handoff package",
        state={"branch": "fix/883", "tests": ["tests/test_mission_bus.py"]},
        artifacts=["config/mission_bus_profiles.json"],
    )
    handoff = bus.handoff("ezra", "timmy", checkpoint.checkpoint_id, note="pick up from here")

    packet = bus.build_resume_packet(handoff.handoff_id)
    assert packet["recipient"] == "timmy"
    assert packet["checkpoint"]["state"]["branch"] == "fix/883"
    assert packet["checkpoint"]["artifacts"] == ["config/mission_bus_profiles.json"]
    assert packet["participants"]["ezra"]["role"] == "write"
    assert packet["handoff_note"] == "pick up from here"


def test_profiles_define_level2_mount_namespace_and_level3_rootless_podman():
    module = load_module()
    profiles = module.load_profiles(CONFIG_PATH)

    levels = {entry["level"]: entry["mechanism"] for entry in profiles["isolation_profiles"]}
    assert levels[2] == "mount_namespace"
    assert levels[3] == "rootless_podman"
    assert profiles["roles"]["audit"] == ["read", "audit"]


def test_mission_bus_roundtrip_preserves_events_and_isolation_profile():
    module = load_module()
    bus = build_bus(module)
    bus.publish("timmy", "mission.start", {"goal": "roundtrip"})
    checkpoint = bus.create_checkpoint("ezra", summary="save state", state={"count": 1})
    bus.handoff("ezra", "timmy", checkpoint.checkpoint_id, note="resume")

    restored = module.MissionBus.from_dict(bus.to_dict())
    assert restored.mission_id == "mission-883"
    assert restored.events[-1].event_type == "handoff"
    assert restored.events[-1].note == "resume"
    assert restored.isolation_profiles[1].mechanism == "mount_namespace"
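
These checks load `nexus/mission_bus.py` by path, so they can run on their own; a minimal sketch, assuming the file sits under a `tests/` directory as the checkpoint fixture above suggests and that pytest is installed:

```bash
# Hypothetical invocation from the repository root; the tests/ path is inferred, not confirmed.
python3 -m pytest tests/test_mission_bus.py -q
```
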
@@ -1,25 +0,0 @@
from pathlib import Path


REPORT = Path("reports/night-shift-prediction-2026-04-12.md")


def test_prediction_report_exists_with_required_sections():
    assert REPORT.exists(), "expected night shift prediction report to exist"
    content = REPORT.read_text()
    assert "# Night Shift Prediction Report — April 12-13, 2026" in content
    assert "## Starting State (11:36 PM)" in content
    assert "## Burn Loops Active (13 @ every 3 min)" in content
    assert "## Expected Outcomes by 7 AM" in content
    assert "### Risk Factors" in content
    assert "### Confidence Level" in content
    assert "This report is a prediction" in content


def test_prediction_report_preserves_core_forecast_numbers():
    content = REPORT.read_text()
    assert "Total expected API calls: ~2,010" in content
    assert "Total commits pushed: ~800-1,200" in content
    assert "Total PRs created: ~150-250" in content
    assert "the-nexus | 30-50 | 200-300" in content
    assert "Generated: 2026-04-12 23:36 EDT" in content