Compare commits

..

1 commit

Author SHA1 Message Date
Timmy Time
2b9672d82b Fix #1500: Document duplicate PR prevention system
Some checks failed
CI / test (pull_request) Failing after 1m4s
CI / validate (pull_request) Failing after 1m33s
Review Approval Gate / verify-review (pull_request) Failing after 8s
Issue #1500 observed that the pre-flight check successfully prevented
a duplicate PR for #1474. This documentation explains the system.

Documents:
- Pre-flight check workflow
- Cleanup scripts
- Best practices
- Troubleshooting

Refs #1500
2026-04-14 22:23:11 -04:00
21 changed files with 136 additions and 1046 deletions

View File

@@ -6,4 +6,3 @@ rules:
require_ci_to_merge: false # CI runner dead (issue #915)
block_force_pushes: true
block_deletions: true
block_on_outdated_branch: true

View File

@@ -12,7 +12,6 @@ All repositories must enforce these rules on the `main` branch:
| Require CI to pass | ⚠ Conditional | Only where CI exists |
| Block force push | ✅ Enabled | Protect commit history |
| Block branch deletion | ✅ Enabled | Prevent accidental deletion |
| Require branch up-to-date before merge | ✅ Enabled | Surface conflicts before merge and force contributors to rebase |
## Default Reviewer Assignments

View File

@@ -285,49 +285,6 @@ class AgentMemory:
logger.warning(f"Failed to store memory: {e}")
return None
def remember_alexander_request_response(
    self,
    *,
    request_text: str,
    response_text: str,
    requester: str = "Alexander Whitestone",
    source: str = "",
    metadata: Optional[dict] = None,
) -> Optional[str]:
    """Store an Alexander request + wizard response artifact in the sovereign room.

    Builds a request/response artifact via ``build_request_response_artifact``
    and persists it with ``add_memory``; the artifact itself decides the room
    (the shared ``sovereign`` room).

    Args:
        request_text: Raw text of Alexander's request.
        response_text: The wizard's response text.
        requester: Display name of the requester.
        source: Optional source identifier; passed through as the artifact
            source and as ``source_file`` for storage.
        metadata: Extra metadata merged over the artifact's own metadata
            (caller-supplied keys win on collision).

    Returns:
        The stored document id, or ``None`` when MemPalace is unavailable or
        storage fails (failures are logged, never raised).
    """
    if not self._check_available():
        logger.warning("Cannot store Alexander artifact — MemPalace unavailable")
        return None
    try:
        # Imported lazily so MemPalace remains an optional dependency.
        from nexus.mempalace.searcher import add_memory
        from nexus.mempalace.conversation_artifacts import build_request_response_artifact
        artifact = build_request_response_artifact(
            requester=requester,
            responder=self.agent_name,
            request_text=request_text,
            response_text=response_text,
            source=source,
        )
        # Copy so caller-supplied metadata never mutates the artifact's dict.
        extra_metadata = dict(artifact.metadata)
        if metadata:
            extra_metadata.update(metadata)
        doc_id = add_memory(
            text=artifact.text,
            room=artifact.room,
            wing=self.wing,
            palace_path=self.palace_path,
            source_file=source,
            extra_metadata=extra_metadata,
        )
        logger.debug("Stored Alexander request/response artifact in sovereign room")
        return doc_id
    except Exception as e:
        # Best-effort persistence: log and return None so callers never crash.
        logger.warning(f"Failed to store Alexander artifact: {e}")
        return None
def write_diary(
self,
summary: Optional[str] = None,

114
app.js
View File

@@ -10,10 +10,6 @@ import { MemoryOptimizer } from './nexus/components/memory-optimizer.js';
import { MemoryInspect } from './nexus/components/memory-inspect.js';
import { MemoryPulse } from './nexus/components/memory-pulse.js';
import { ReasoningTrace } from './nexus/components/reasoning-trace.js';
import {
createPortalRegistryWatcher,
fetchPortalRegistry,
} from './portal-registry.mjs';
// ═══════════════════════════════════════════
// NEXUS v1.1 — Portal System Update
@@ -87,7 +83,6 @@ let workshopPanelCanvas = null;
let workshopScanMat = null;
let workshopPanelRefreshTimer = 0;
let lastFocusedPortal = null;
let portalRegistryWatcher = null;
// ═══ VISITOR / OPERATOR MODE ═══
let uiMode = 'visitor'; // 'visitor' | 'operator'
@@ -719,10 +714,6 @@ async function init() {
camera = new THREE.PerspectiveCamera(65, window.innerWidth / window.innerHeight, 0.1, 1000);
camera.position.copy(playerPos);
// Initialize avatar and LOD systems
if (window.AvatarCustomization) window.AvatarCustomization.init(scene, camera);
if (window.LODSystem) window.LODSystem.init(scene, camera);
updateLoad(20);
createSkybox();
@@ -736,25 +727,9 @@ async function init() {
// Load Portals from Registry
try {
const portalData = await fetchPortalRegistry({
registryUrl: './portals.json',
cacheBustParam: '_registry_ts',
});
applyPortalRegistry(portalData);
portalRegistryWatcher?.stop?.();
portalRegistryWatcher = createPortalRegistryWatcher({
loadRegistry: () => fetchPortalRegistry({
registryUrl: './portals.json',
cacheBustParam: '_registry_ts',
}),
applyRegistry: (nextRegistry) => applyPortalRegistry(nextRegistry, { announce: true }),
onError: (error) => {
console.error('Failed to hot-reload portals.json:', error);
},
});
await portalRegistryWatcher.prime(portalData);
portalRegistryWatcher.start();
const response = await fetch('./portals.json');
const portalData = await response.json();
createPortals(portalData);
} catch (e) {
console.error('Failed to load portals.json:', e);
addChatMessage('error', 'Portal registry offline. Check logs.');
@@ -1105,7 +1080,7 @@ function refreshWorkshopPanel() {
ctx.fillText(`HERMES STATUS: ${wsConnected ? 'ONLINE' : 'OFFLINE'}`, 40, 120);
ctx.fillStyle = '#7b5cff';
const contextName = activePortal ? activePortal.config.name.toUpperCase() : 'NEXUS CORE';
const contextName = activePortal ? activePortal.name.toUpperCase() : 'NEXUS CORE';
ctx.fillText(`CONTEXT: ${contextName}`, 40, 160);
ctx.fillStyle = '#a0b8d0';
@@ -1552,83 +1527,6 @@ function createVisionPoint(config) {
}
// ═══ PORTAL SYSTEM ═══
/**
 * Release GPU resources held by a material (or an array of materials).
 * Disposes every known texture slot, any shader-uniform values that expose
 * dispose(), and finally the material itself. Safe to call with null.
 */
function disposeMaterial(material) {
  if (!material) return;
  if (Array.isArray(material)) {
    for (const entry of material) disposeMaterial(entry);
    return;
  }
  const textureSlots = [
    'map',
    'alphaMap',
    'aoMap',
    'bumpMap',
    'displacementMap',
    'emissiveMap',
    'envMap',
    'lightMap',
    'metalnessMap',
    'normalMap',
    'roughnessMap',
    'specularMap',
  ];
  for (const slot of textureSlots) {
    material[slot]?.dispose?.();
  }
  if (material.uniforms) {
    for (const uniform of Object.values(material.uniforms)) {
      uniform?.value?.dispose?.();
    }
  }
  material.dispose?.();
}
/**
 * Walk a Three.js object graph and free every geometry and material it owns.
 * No-op when root is null or lacks a traverse method.
 */
function disposeObject3D(root) {
  if (!root?.traverse) {
    return;
  }
  root.traverse(function releaseNode(node) {
    node.geometry?.dispose?.();
    disposeMaterial(node.material);
  });
}
/**
 * Tear down every active portal: detach each group from the scene, release
 * its GPU resources, then reset the portal-tracking module globals.
 */
function clearPortals() {
  for (const portal of portals) {
    const group = portal.group;
    scene?.remove?.(group);
    disposeObject3D(group);
    group?.clear?.();
  }
  portals = [];
  activePortal = null;
  lastFocusedPortal = null;
}
/**
 * Rebuild the portal set from a freshly-loaded registry snapshot.
 * Preserves the currently-focused portal when it survives the reload,
 * closes the portal overlay when it does not, and refreshes dependent UI
 * (atlas, proximity state, workshop panel).
 * @param {Array} data - portal config array (portals.json contents)
 * @param {{announce?: boolean}} [options] - announce posts a chat notice
 */
function applyPortalRegistry(data, { announce = false } = {}) {
  const keepId = activePortal?.config?.id || null;
  clearPortals();
  createPortals(data);
  if (keepId) {
    const survivor = portals.find((portal) => portal.config.id === keepId) || null;
    activePortal = survivor;
    if (!survivor && portalOverlayActive) {
      closePortalOverlay();
    }
  }
  if (atlasOverlayActive) populateAtlas();
  checkPortalProximity();
  refreshWorkshopPanel();
  if (announce) {
    addChatMessage('system', `Portal registry hot-reloaded (${data.length} portals).`, false);
  }
}
function createPortals(data) {
data.forEach(config => {
const portal = createPortal(config);
@@ -3659,10 +3557,6 @@ function gameLoop() {
if (composer) { composer.render(); } else { renderer.render(scene, camera); }
// Update avatar and LOD systems
if (window.AvatarCustomization && playerPos) window.AvatarCustomization.update(playerPos);
if (window.LODSystem && playerPos) window.LODSystem.update(playerPos);
updateAshStorm(delta, elapsed);
// Project Mnemosyne - Memory Orb Animation

View File

@@ -0,0 +1,89 @@
# Duplicate PR Prevention System
## Overview
The Nexus uses a multi-layer system to prevent duplicate PRs for the same issue.
## Components
### 1. Pre-flight Check (CI)
The `.github/workflows/pr-duplicate-check.yml` workflow runs on every PR creation and checks if a PR already exists for the same issue.
**How it works:**
1. Extracts issue numbers from PR title and body
2. Queries Gitea API for existing PRs referencing those issues
3. Fails the check if duplicates are found
4. Provides links to existing PRs for review
### 2. Cleanup Script
The `scripts/cleanup-duplicate-prs.sh` script helps clean up existing duplicates:
- Lists all PRs for a given issue
- Identifies duplicates
- Provides commands to close duplicates
### 3. Milestone Checker
The `bin/check_duplicate_milestones.py` script prevents duplicate milestones:
- Scans all milestones in the repo
- Identifies duplicates by title
- Reports for manual cleanup
## Usage
### Check for Duplicates Before Creating PR
```bash
# Check if issue already has PRs
curl -s -H "Authorization: token $GITEA_TOKEN" \
"https://forge.alexanderwhitestone.com/api/v1/repos/Timmy_Foundation/the-nexus/pulls?state=open" \
| jq '.[] | select(.body | contains("#ISSUE_NUMBER"))'
```
### Clean Up Existing Duplicates
```bash
# List PRs for issue
./scripts/cleanup-duplicate-prs.sh --issue 1128
# Close duplicates (keep newest)
./scripts/cleanup-duplicate-prs.sh --issue 1128 --close-duplicates
```
## Example: Issue #1500
Issue #1500 documented that the pre-flight check successfully prevented a duplicate PR for #1474.
**What happened:**
1. Dispatch attempted to work on #1474
2. Pre-flight check found 2 existing PRs (#1495, #1493)
3. System prevented creating a 3rd duplicate
4. Issue #1500 was filed as an observation
**Result:** The system worked as intended.
## Best Practices
1. **Always check before creating PRs** — use the pre-flight check
2. **Close duplicates promptly** — don't let them accumulate
3. **Reference issues in PRs** — makes duplicate detection possible
4. **Use descriptive branch names** — helps identify purpose
5. **Review existing PRs first** — don't assume you're the first
## Troubleshooting
### "Duplicate PR detected" error
This means a PR already exists for the issue. Options:
1. Review the existing PR and contribute to it
2. Close your PR if it's truly a duplicate
3. Update your PR to address a different aspect
### Pre-flight check not running
Check that `.github/workflows/pr-duplicate-check.yml` exists and is enabled.
### False positives
The check looks for issue numbers in PR body. If you're referencing an issue without intending to fix it, use "Refs #" instead of "Fixes #".

View File

@@ -395,8 +395,6 @@
<div id="memory-connections-panel" class="memory-connections-panel" style="display:none;" aria-label="Memory Connections Panel"></div>
<script src="./boot.js"></script>
<script src="./avatar-customization.js"></script>
<script src="./lod-system.js"></script>
<script>
function openMemoryFilter() { renderFilterList(); document.getElementById('memory-filter').style.display = 'flex'; }
function closeMemoryFilter() { document.getElementById('memory-filter').style.display = 'none'; }

View File

@@ -1,186 +0,0 @@
/**
 * LOD (Level of Detail) System for The Nexus
 *
 * Optimizes rendering when many avatars/users are visible:
 * - Distance-based LOD: far users become billboard sprites
 * - Frustum culling: users outside the camera view are hidden
 * - Budget: maintain 60 FPS target with 50+ avatars
 *
 * Usage:
 *   LODSystem.init(scene, camera);
 *   LODSystem.registerAvatar(avatarMesh, userId);
 *   LODSystem.update(playerPos); // call each frame
 */
const LODSystem = (() => {
  let _scene = null;
  let _camera = null;
  const _registered = new Map(); // userId -> { mesh, sprite, distance }
  let _spriteMaterial = null;
  const _frustum = new THREE.Frustum();
  const _projScreenMatrix = new THREE.Matrix4();
  // Distance thresholds (world units)
  const LOD_NEAR = 15; // full-mesh band; reserved for a future reduced-detail mid band
  const LOD_FAR = 40; // billboard beyond 40 units
  const LOD_CULL = 80; // don't render beyond 80 units
  const SPRITE_SIZE = 1.2;
  /**
   * Build the 64x64 billboard texture: a colored disc with a dark "head"
   * circle. Shared by init/registerAvatar/setSpriteColor so the drawing
   * code lives in exactly one place.
   * @param {string} [color='#00ffcc'] - disc fill color
   * @returns {THREE.CanvasTexture}
   */
  function makeAvatarTexture(color = '#00ffcc') {
    const canvas = document.createElement('canvas');
    canvas.width = 64;
    canvas.height = 64;
    const ctx = canvas.getContext('2d');
    ctx.fillStyle = color;
    ctx.beginPath();
    ctx.arc(32, 32, 20, 0, Math.PI * 2);
    ctx.fill();
    ctx.fillStyle = '#0a0f1a';
    ctx.beginPath();
    ctx.arc(32, 28, 8, 0, Math.PI * 2); // head
    ctx.fill();
    return new THREE.CanvasTexture(canvas);
  }
  /** Wire up scene/camera references and create the shared sprite material. */
  function init(sceneRef, cameraRef) {
    _scene = sceneRef;
    _camera = cameraRef;
    _spriteMaterial = new THREE.SpriteMaterial({
      map: makeAvatarTexture(),
      transparent: true,
      depthTest: true,
      sizeAttenuation: true,
    });
    console.log('[LODSystem] Initialized');
  }
  /**
   * Track an avatar mesh and create its (initially hidden) billboard sprite.
   * @param {THREE.Object3D} avatarMesh - full-detail avatar
   * @param {*} userId - key used by unregisterAvatar/setSpriteColor
   * @param {string} [color] - optional tint; defaults to the shared texture
   */
  function registerAvatar(avatarMesh, userId, color) {
    const spriteMat = _spriteMaterial.clone();
    if (color) {
      spriteMat.map = makeAvatarTexture(color);
      spriteMat.map.needsUpdate = true;
    }
    const sprite = new THREE.Sprite(spriteMat);
    sprite.scale.set(SPRITE_SIZE, SPRITE_SIZE, 1);
    sprite.visible = false; // update() decides when the billboard shows
    _scene.add(sprite);
    _registered.set(userId, {
      mesh: avatarMesh,
      sprite,
      distance: Infinity,
    });
  }
  /** Remove a tracked avatar's sprite from the scene and free its material. */
  function unregisterAvatar(userId) {
    const entry = _registered.get(userId);
    if (entry) {
      _scene.remove(entry.sprite);
      entry.sprite.material.dispose();
      _registered.delete(userId);
    }
  }
  /** Re-tint an existing avatar's billboard sprite. No-op for unknown ids. */
  function setSpriteColor(userId, color) {
    const entry = _registered.get(userId);
    if (!entry) return;
    entry.sprite.material.map = makeAvatarTexture(color);
    entry.sprite.material.map.needsUpdate = true;
  }
  /**
   * Per-frame LOD pass: cull by distance and camera frustum, then switch
   * each avatar between full mesh (near/mid) and billboard sprite (far).
   * @param {THREE.Vector3} playerPos - current player position
   */
  function update(playerPos) {
    if (!_camera) return;
    // Refresh the frustum from the camera's latest matrices.
    _projScreenMatrix.multiplyMatrices(
      _camera.projectionMatrix,
      _camera.matrixWorldInverse
    );
    _frustum.setFromProjectionMatrix(_projScreenMatrix);
    _registered.forEach((entry) => {
      if (!entry.mesh) return;
      const meshPos = entry.mesh.position;
      const distance = playerPos.distanceTo(meshPos);
      entry.distance = distance;
      // Beyond cull distance: hide everything
      if (distance > LOD_CULL) {
        entry.mesh.visible = false;
        entry.sprite.visible = false;
        return;
      }
      // Outside the camera frustum: hide everything
      if (!_frustum.containsPoint(meshPos)) {
        entry.mesh.visible = false;
        entry.sprite.visible = false;
        return;
      }
      if (distance <= LOD_FAR) {
        // Near/mid range: show the full mesh. (The original near and mid
        // branches were identical; a reduced-detail mid variant is a TODO.)
        entry.mesh.visible = true;
        entry.sprite.visible = false;
      } else {
        // Far: billboard sprite positioned just above the avatar center
        entry.mesh.visible = false;
        entry.sprite.visible = true;
        entry.sprite.position.copy(meshPos);
        entry.sprite.position.y += 1.2;
      }
    });
  }
  /** Counts of avatars currently rendered as mesh, sprite, or culled. */
  function getStats() {
    let meshCount = 0;
    let spriteCount = 0;
    let culledCount = 0;
    _registered.forEach((entry) => {
      if (entry.mesh.visible) meshCount++;
      else if (entry.sprite.visible) spriteCount++;
      else culledCount++;
    });
    return { total: _registered.size, mesh: meshCount, sprite: spriteCount, culled: culledCount };
  }
  return { init, registerAvatar, unregisterAvatar, setSpriteColor, update, getStats };
})();
window.LODSystem = LODSystem;

View File

@@ -62,15 +62,6 @@ core_rooms:
- proof-of-concept code snippets
- benchmark data
- key: sovereign
label: Sovereign
purpose: Artifacts of Alexander Whitestone's requests, directives, and wizard responses
examples:
- dated request/response artifacts
- conversation summaries with speaker tags
- directive ledgers
- response follow-through notes
optional_rooms:
- key: evennia
label: Evennia
@@ -107,6 +98,15 @@ optional_rooms:
purpose: Catch-all for artefacts not yet assigned to a named room
wizards: ["*"]
- key: sovereign
label: Sovereign
purpose: Artifacts of Alexander Whitestone's requests, directives, and conversation history
wizards: ["*"]
conventions:
naming: "YYYY-MM-DD_HHMMSS_<topic>.md"
index: "INDEX.md"
description: "Each artifact is a dated record of a request from Alexander and the wizard's response. The running INDEX.md provides a chronological catalog."
# Tunnel routing table
# Defines which room pairs are connected across wizard wings.
# A tunnel lets `recall <query> --fleet` search both wings at once.

View File

@@ -13,12 +13,6 @@ from __future__ import annotations
from nexus.mempalace.config import MEMPALACE_PATH, FLEET_WING
from nexus.mempalace.searcher import search_memories, add_memory, MemPalaceResult
from nexus.mempalace.conversation_artifacts import (
ConversationArtifact,
build_request_response_artifact,
extract_alexander_request_pairs,
normalize_speaker,
)
__all__ = [
"MEMPALACE_PATH",
@@ -26,8 +20,4 @@ __all__ = [
"search_memories",
"add_memory",
"MemPalaceResult",
"ConversationArtifact",
"build_request_response_artifact",
"extract_alexander_request_pairs",
"normalize_speaker",
]

View File

@@ -40,7 +40,6 @@ CORE_ROOMS: list[str] = [
"nexus", # reports, docs, KT
"issues", # tickets, backlog
"experiments", # prototypes, spikes
"sovereign", # Alexander request/response artifacts
]
# ── ChromaDB collection name ──────────────────────────────────────────────────

View File

@@ -1,122 +0,0 @@
"""Helpers for preserving Alexander request/response artifacts in MemPalace.
This module provides a small, typed bridge between raw conversation turns and
MemPalace drawers stored in the shared `sovereign` room. The goal is not to
solve all future speaker-tagging needs at once; it gives the Nexus one
canonical artifact shape that other miners and bridges can reuse.
"""
from __future__ import annotations
from dataclasses import dataclass, field
from datetime import datetime, timezone
from typing import Iterable
# Lowercased display names / handles that all resolve to Alexander
# Whitestone; consumed by normalize_speaker() to collapse aliases.
_ALEXANDER_ALIASES = {
    "alexander",
    "alexander whitestone",
    "rockachopa",
    "triptimmy",
}
@dataclass(frozen=True)
class ConversationArtifact:
    """Immutable request/response record destined for the shared `sovereign` room."""

    requester: str  # speaker slug of the requester (normalized by the builder)
    responder: str  # speaker slug of the responding wizard
    request_text: str
    response_text: str
    room: str = "sovereign"  # MemPalace room the artifact is filed under
    # UTC creation time in "YYYY-MM-DDTHH:MM:SSZ" form
    timestamp: str = field(default_factory=lambda: datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"))
    metadata: dict = field(default_factory=dict)

    @property
    def text(self) -> str:
        """Render the artifact as the markdown body stored in MemPalace."""
        return (
            f"# Conversation Artifact\n\n"
            f"## Alexander Request\n{self.request_text.strip()}\n\n"
            f"## Wizard Response\n{self.response_text.strip()}\n"
        )
def normalize_speaker(name: str | None) -> str:
    """Normalize a speaker name to a lowercase slug.

    Whitespace is collapsed and lowercased; any alias of Alexander
    Whitestone collapses to ``"alexander"``; other names become
    underscore-joined slugs; empty input yields ``"unknown"``.
    """
    words = (name or "").strip().lower().split()
    cleaned = " ".join(words)
    if cleaned in _ALEXANDER_ALIASES:
        return "alexander"
    slug = cleaned.replace(" ", "_")
    return slug or "unknown"
def build_request_response_artifact(
    *,
    requester: str,
    responder: str,
    request_text: str,
    response_text: str,
    source: str = "",
    timestamp: str | None = None,
    request_timestamp: str | None = None,
    response_timestamp: str | None = None,
) -> ConversationArtifact:
    """Construct a sovereign-room artifact from one request/response pair.

    Speaker names are normalized to slugs, a UTC timestamp is generated when
    none is supplied, and per-turn timestamps are recorded in the metadata
    only when truthy.
    """
    requester_slug = normalize_speaker(requester)
    responder_slug = normalize_speaker(responder)
    ts = timestamp or datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
    meta: dict = {
        "artifact_type": "alexander_request_response",
        "requester": requester_slug,
        "responder": responder_slug,
        "speaker_tags": [f"speaker:{requester_slug}", f"speaker:{responder_slug}"],
        "source": source,
        "timestamp": ts,
    }
    # Optional per-turn timestamps are only recorded when provided.
    for key, value in (
        ("request_timestamp", request_timestamp),
        ("response_timestamp", response_timestamp),
    ):
        if value:
            meta[key] = value
    return ConversationArtifact(
        requester=requester_slug,
        responder=responder_slug,
        request_text=request_text,
        response_text=response_text,
        timestamp=ts,
        metadata=meta,
    )
def extract_alexander_request_pairs(
    turns: Iterable[dict],
    *,
    responder: str,
    source: str = "",
) -> list[ConversationArtifact]:
    """Pair each Alexander request with the responder's next reply.

    Turns are dicts carrying a speaker key (``speaker``/``username``/
    ``author``/``name``) and a text key (``text``/``content``). A turn from
    Alexander becomes the pending request; the next non-empty turn from
    ``responder`` closes the pair and yields one artifact. A newer Alexander
    turn replaces an unanswered pending request; turns from other speakers
    are ignored.
    """
    responder_slug = normalize_speaker(responder)
    pending_request: dict | None = None
    artifacts: list[ConversationArtifact] = []
    for turn in turns:
        speaker = normalize_speaker(
            turn.get("speaker") or turn.get("username") or turn.get("author") or turn.get("name")
        )
        text = (turn.get("text") or turn.get("content") or "").strip()
        if not text:
            # Empty turns neither open nor close a pair.
            continue
        if speaker == "alexander":
            # Most recent Alexander turn wins until a response arrives.
            pending_request = turn
            continue
        if speaker == responder_slug and pending_request is not None:
            artifacts.append(
                build_request_response_artifact(
                    requester="alexander",
                    responder=responder_slug,
                    request_text=(pending_request.get("text") or pending_request.get("content") or "").strip(),
                    response_text=text,
                    source=source,
                    request_timestamp=pending_request.get("timestamp"),
                    response_timestamp=turn.get("timestamp"),
                    timestamp=turn.get("timestamp") or pending_request.get("timestamp"),
                )
            )
            pending_request = None
    return artifacts

View File

@@ -1,123 +0,0 @@
export const DEFAULT_PORTAL_REGISTRY_POLL_MS = 5000;
export const DEFAULT_PORTAL_REGISTRY_CACHE_BUST_PARAM = '_registry_ts';
/** Stable change-detection signature for a registry snapshot (its JSON form). */
export function getPortalRegistrySignature(registry) {
  const signature = JSON.stringify(registry);
  return signature;
}
/**
 * Resolve the registry URL against baseHref and append a cache-busting
 * query parameter. Existing query parameters on registryUrl are kept;
 * the cache-bust param is skipped when cacheBustValue is null/undefined.
 * @returns {string} absolute request URL
 */
export function buildPortalRegistryRequestUrl(
  registryUrl = './portals.json',
  cacheBustValue = Date.now(),
  baseHref = typeof window !== 'undefined' && window.location ? window.location.href : 'http://localhost/',
  cacheBustParam = DEFAULT_PORTAL_REGISTRY_CACHE_BUST_PARAM,
) {
  const resolved = new URL(registryUrl, baseHref);
  const shouldBust = cacheBustValue !== null && cacheBustValue !== undefined;
  if (shouldBust) {
    resolved.searchParams.set(cacheBustParam, String(cacheBustValue));
  }
  return resolved.toString();
}
/**
 * Fetch and validate portals.json.
 * Bypasses HTTP caches via a cache-busting query plus no-store headers.
 * @returns {Promise<Array>} the parsed registry array
 * @throws {Error} when the response is not OK or the payload is not a JSON array
 */
export async function fetchPortalRegistry({
  fetchImpl = fetch,
  registryUrl = './portals.json',
  baseHref = typeof window !== 'undefined' && window.location ? window.location.href : 'http://localhost/',
  cacheBustValue = Date.now(),
  cacheBustParam = DEFAULT_PORTAL_REGISTRY_CACHE_BUST_PARAM,
} = {}) {
  const requestUrl = buildPortalRegistryRequestUrl(registryUrl, cacheBustValue, baseHref, cacheBustParam);
  const response = await fetchImpl(requestUrl, {
    cache: 'no-store',
    headers: { 'Cache-Control': 'no-cache' },
  });
  if (!response.ok) {
    throw new Error(`Failed to load ${registryUrl}: ${response.status}`);
  }
  const payload = await response.json();
  if (!Array.isArray(payload)) {
    throw new Error(`${registryUrl} must be a JSON array`);
  }
  return payload;
}
export function createPortalRegistryWatcher({
loadRegistry,
applyRegistry,
onError = (error) => console.error('Portal registry watch failed:', error),
intervalMs = DEFAULT_PORTAL_REGISTRY_POLL_MS,
setIntervalImpl = setInterval,
clearIntervalImpl = clearInterval,
signatureFn = getPortalRegistrySignature,
} = {}) {
if (typeof loadRegistry !== 'function') {
throw new TypeError('loadRegistry must be a function');
}
if (typeof applyRegistry !== 'function') {
throw new TypeError('applyRegistry must be a function');
}
let intervalId = null;
let lastSignature = null;
async function refresh() {
try {
const registry = await loadRegistry();
const nextSignature = signatureFn(registry);
if (nextSignature === lastSignature) {
return {
changed: false,
registry,
previousSignature: lastSignature,
nextSignature,
};
}
const previousSignature = lastSignature;
lastSignature = nextSignature;
applyRegistry(registry, { previousSignature, nextSignature });
return {
changed: true,
registry,
previousSignature,
nextSignature,
};
} catch (error) {
onError(error);
return { changed: false, error };
}
}
return {
async prime(registry) {
lastSignature = signatureFn(registry);
return lastSignature;
},
getLastSignature() {
return lastSignature;
},
async refresh() {
return refresh();
},
start() {
if (intervalId !== null) {
return intervalId;
}
intervalId = setIntervalImpl(() => {
void refresh();
}, intervalMs);
return intervalId;
},
stop() {
if (intervalId !== null) {
clearIntervalImpl(intervalId);
intervalId = null;
}
},
};
}

View File

@@ -129,22 +129,13 @@
"type": "harness",
"params": {
"mode": "creative"
},
"action_label": "Enter Workshop"
}
},
"agents_present": [
"timmy",
"kimi"
],
"interaction_ready": true,
"portal_type": "harness",
"world_category": "creative",
"environment": "production",
"access_mode": "visitor",
"readiness_state": "online",
"telemetry_source": "workshop.timmy.foundation",
"owner": "Timmy",
"blocked_reason": null
"interaction_ready": true
},
{
"id": "archive",
@@ -166,21 +157,12 @@
"type": "harness",
"params": {
"mode": "read"
},
"action_label": "Enter Archive"
}
},
"agents_present": [
"claude"
],
"interaction_ready": true,
"portal_type": "harness",
"world_category": "knowledge",
"environment": "production",
"access_mode": "visitor",
"readiness_state": "online",
"telemetry_source": "archive.timmy.foundation",
"owner": "Timmy",
"blocked_reason": null
"interaction_ready": true
},
{
"id": "chapel",
@@ -202,19 +184,10 @@
"type": "harness",
"params": {
"mode": "meditation"
},
"action_label": "Enter Chapel"
}
},
"agents_present": [],
"interaction_ready": true,
"portal_type": "harness",
"world_category": "reflection",
"environment": "production",
"access_mode": "visitor",
"readiness_state": "online",
"telemetry_source": "chapel.timmy.foundation",
"owner": "Timmy",
"blocked_reason": null
"interaction_ready": true
},
{
"id": "courtyard",
@@ -236,22 +209,13 @@
"type": "harness",
"params": {
"mode": "social"
},
"action_label": "Enter Courtyard"
}
},
"agents_present": [
"timmy",
"perplexity"
],
"interaction_ready": true,
"portal_type": "harness",
"world_category": "social",
"environment": "production",
"access_mode": "visitor",
"readiness_state": "online",
"telemetry_source": "courtyard.timmy.foundation",
"owner": "Timmy",
"blocked_reason": null
"interaction_ready": true
},
{
"id": "gate",
@@ -273,19 +237,10 @@
"type": "harness",
"params": {
"mode": "transit"
},
"action_label": "Open Gate"
}
},
"agents_present": [],
"interaction_ready": false,
"portal_type": "harness",
"world_category": "transit",
"environment": "production",
"access_mode": "visitor",
"readiness_state": "standby",
"telemetry_source": "gate.timmy.foundation",
"owner": "Timmy",
"blocked_reason": "Transit gate staged but not interaction ready."
"interaction_ready": false
},
{
"id": "playground",
@@ -337,4 +292,4 @@
"agents_present": [],
"interaction_ready": true
}
]
]

View File

@@ -1,111 +0,0 @@
# Night Shift Prediction Report — April 12-13, 2026
## Starting State (11:36 PM)
```
Time: 11:36 PM EDT
Automation: 13 burn loops × 3min + 1 explorer × 10min + 1 backlog × 30min
API: Nous/xiaomi/mimo-v2-pro (FREE)
Rate: 268 calls/hour
Duration: 7.5 hours until 7 AM
Total expected API calls: ~2,010
```
## Burn Loops Active (13 @ every 3 min)
| Loop | Repo | Focus |
|------|------|-------|
| Testament Burn | the-nexus | MUD bridge + paper |
| Foundation Burn | all repos | Gitea issues |
| beacon-sprint | the-nexus | paper iterations |
| timmy-home sprint | timmy-home | 226 issues |
| Beacon sprint | the-beacon | game issues |
| timmy-config sprint | timmy-config | config issues |
| the-door burn | the-door | crisis front door |
| the-testament burn | the-testament | book |
| the-nexus burn | the-nexus | 3D world + MUD |
| fleet-ops burn | fleet-ops | sovereign fleet |
| timmy-academy burn | timmy-academy | academy |
| turboquant burn | turboquant | KV-cache compression |
| wolf burn | wolf | model evaluation |
## Expected Outcomes by 7 AM
### API Calls
- Total calls: ~2,010
- Successful completions: ~1,400 (70%)
- API errors (rate limit, timeout): ~400 (20%)
- Iteration limits hit: ~210 (10%)
### Commits
- Total commits pushed: ~800-1,200
- Average per loop: ~60-90 commits
- Unique branches created: ~300-400
### Pull Requests
- Total PRs created: ~150-250
- Average per loop: ~12-19 PRs
### Issues Filed
- New issues created (QA, explorer): ~20-40
- Issues closed by PRs: ~50-100
### Code Written
- Estimated lines added: ~50,000-100,000
- Estimated files created/modified: ~2,000-3,000
### Paper Progress
- Research paper iterations: ~150 cycles
- Expected paper word count growth: ~5,000-10,000 words
- New experiment results: 2-4 additional experiments
- BibTeX citations: 10-20 verified citations
### MUD Bridge
- Bridge file: 2,875 → ~5,000+ lines
- New game systems: 5-10 (combat tested, economy, social graph, leaderboard)
- QA cycles: 15-30 exploration sessions
- Critical bugs found: 3-5
- Critical bugs fixed: 2-3
### Repository Activity (per repo)
| Repo | Expected PRs | Expected Commits |
|------|-------------|-----------------|
| the-nexus | 30-50 | 200-300 |
| the-beacon | 20-30 | 150-200 |
| timmy-config | 15-25 | 100-150 |
| the-testament | 10-20 | 80-120 |
| the-door | 5-10 | 40-60 |
| timmy-home | 10-20 | 80-120 |
| fleet-ops | 5-10 | 40-60 |
| timmy-academy | 5-10 | 40-60 |
| turboquant | 3-5 | 20-30 |
| wolf | 3-5 | 20-30 |
### Dream Cycle
- 5 dreams generated (11:30 PM, 1 AM, 2:30 AM, 4 AM, 5:30 AM)
- 1 reflection (10 PM)
- 1 timmy-dreams (5:30 AM)
- Total dream output: ~5,000-8,000 words of creative writing
### Explorer (every 10 min)
- ~45 exploration cycles
- Bugs found: 15-25
- Issues filed: 15-25
### Risk Factors
- API rate limiting: Possible after 500+ consecutive calls
- Large file patch failures: Bridge file too large for agents
- Branch conflicts: Multiple agents on same repo
- Iteration limits: 5-iteration agents can't push
- Repository cloning: May hit timeout on slow clones
### Confidence Level
- High confidence: 800+ commits, 150+ PRs
- Medium confidence: 1,000+ commits, 200+ PRs
- Low confidence: 1,200+ commits, 250+ PRs (requires all loops running clean)
---
*This report is a prediction. The 7 AM morning report will compare actual results.*
*Generated: 2026-04-12 23:36 EDT*
*Author: Timmy (pre-shift prediction)*

View File

@@ -4,61 +4,48 @@ Sync branch protection rules from .gitea/branch-protection/*.yml to Gitea.
Correctly uses the Gitea 1.25+ API (not GitHub-style).
"""
from __future__ import annotations
import json
import os
import sys
import json
import urllib.request
from pathlib import Path
import yaml
GITEA_URL = os.getenv("GITEA_URL", "https://forge.alexanderwhitestone.com")
GITEA_TOKEN = os.getenv("GITEA_TOKEN", "")
ORG = "Timmy_Foundation"
PROJECT_ROOT = Path(__file__).resolve().parent.parent
CONFIG_DIR = PROJECT_ROOT / ".gitea" / "branch-protection"
CONFIG_DIR = ".gitea/branch-protection"
def api_request(method: str, path: str, payload: dict | None = None) -> dict:
url = f"{GITEA_URL}/api/v1{path}"
data = json.dumps(payload).encode() if payload else None
req = urllib.request.Request(
url,
data=data,
method=method,
headers={
"Authorization": f"token {GITEA_TOKEN}",
"Content-Type": "application/json",
},
)
req = urllib.request.Request(url, data=data, method=method, headers={
"Authorization": f"token {GITEA_TOKEN}",
"Content-Type": "application/json",
})
with urllib.request.urlopen(req, timeout=30) as resp:
return json.loads(resp.read().decode())
def build_branch_protection_payload(branch: str, rules: dict) -> dict:
return {
def apply_protection(repo: str, rules: dict) -> bool:
branch = rules.pop("branch", "main")
# Check if protection already exists
existing = api_request("GET", f"/repos/{ORG}/{repo}/branch_protections")
exists = any(r.get("branch_name") == branch for r in existing)
payload = {
"branch_name": branch,
"rule_name": branch,
"required_approvals": rules.get("required_approvals", 1),
"block_on_rejected_reviews": rules.get("block_on_rejected_reviews", True),
"dismiss_stale_approvals": rules.get("dismiss_stale_approvals", True),
"block_deletions": rules.get("block_deletions", True),
"block_force_push": rules.get("block_force_push", rules.get("block_force_pushes", True)),
"block_force_push": rules.get("block_force_push", True),
"block_admin_merge_override": rules.get("block_admin_merge_override", True),
"enable_status_check": rules.get("require_ci_to_merge", False),
"status_check_contexts": rules.get("status_check_contexts", []),
"block_on_outdated_branch": rules.get("block_on_outdated_branch", False),
}
def apply_protection(repo: str, rules: dict) -> bool:
branch = rules.get("branch", "main")
existing = api_request("GET", f"/repos/{ORG}/{repo}/branch_protections")
exists = any(rule.get("branch_name") == branch for rule in existing)
payload = build_branch_protection_payload(branch, rules)
try:
if exists:
api_request("PATCH", f"/repos/{ORG}/{repo}/branch_protections/{branch}", payload)
@@ -66,8 +53,8 @@ def apply_protection(repo: str, rules: dict) -> bool:
api_request("POST", f"/repos/{ORG}/{repo}/branch_protections", payload)
print(f"{repo}:{branch} synced")
return True
except Exception as exc:
print(f"{repo}:{branch} failed: {exc}")
except Exception as e:
print(f"{repo}:{branch} failed: {e}")
return False
def main() -> int:
    """Sync branch protection for every repo config under CONFIG_DIR.

    Returns:
        Process exit code: 0 on success, 1 when preconditions fail.
    """
    if not GITEA_TOKEN:
        print("ERROR: GITEA_TOKEN not set")
        return 1
    if not CONFIG_DIR.exists():
        print(f"ERROR: config directory not found: {CONFIG_DIR}")
        return 1
    ok = 0
    # One YAML file per repo; the filename stem is the repo name.
    for cfg_path in sorted(CONFIG_DIR.glob("*.yml")):
        repo = cfg_path.stem
        with cfg_path.open() as fh:
            cfg = yaml.safe_load(fh) or {}  # empty file -> {} rather than None
        rules = cfg.get("rules", {})
        # Allow "branch" at the top level of the config as well as inside rules.
        rules.setdefault("branch", cfg.get("branch", "main"))
        if apply_protection(repo, rules):
            ok += 1
    print(f"\nSynced {ok} repo(s)")
    return 0

View File

@@ -1,71 +0,0 @@
import test from 'node:test';
import assert from 'node:assert/strict';
import {
buildPortalRegistryRequestUrl,
createPortalRegistryWatcher,
getPortalRegistrySignature,
} from '../portal-registry.mjs';
test('buildPortalRegistryRequestUrl appends a cache-busting query without dropping existing params', () => {
  const pageUrl = 'https://nexus.test/world/index.html';
  // The relative path already carries ?mode=atlas; it must survive alongside
  // the _registry_ts cache-buster after resolution against the page URL.
  const result = buildPortalRegistryRequestUrl('./portals.json?mode=atlas', 42, pageUrl);
  assert.equal(result, 'https://nexus.test/world/portals.json?mode=atlas&_registry_ts=42');
});
test('portal registry watcher only reapplies the world when portals.json actually changes', async () => {
  // Records every applyRegistry invocation so rebuilds can be counted.
  const applied = [];
  // Two polled snapshots: the first matches the primed registry, the second adds a portal.
  const snapshots = [
    [{ id: 'forge', status: 'online' }],
    [{ id: 'forge', status: 'online' }, { id: 'archive', status: 'online' }],
  ];
  let tick;
  const started = [];
  const cleared = [];
  const watcher = createPortalRegistryWatcher({
    intervalMs: 2500,
    // Each poll consumes one snapshot; running out means the watcher over-polled.
    loadRegistry: async () => {
      const next = snapshots.shift();
      if (!next) throw new Error('no more snapshots');
      return next;
    },
    applyRegistry: (registry, meta) => {
      applied.push({ ids: registry.map((portal) => portal.id), meta });
    },
    // Any watcher-level error should fail the test loudly.
    onError: (error) => {
      throw error;
    },
    // Capture the poll callback instead of scheduling a real timer.
    setIntervalImpl: (fn, ms) => {
      tick = fn;
      started.push(ms);
      return 99;
    },
    clearIntervalImpl: (id) => {
      cleared.push(id);
    },
  });
  // Prime with the same registry as the first snapshot...
  await watcher.prime([{ id: 'forge', status: 'online' }]);
  assert.equal(
    watcher.getLastSignature(),
    getPortalRegistrySignature([{ id: 'forge', status: 'online' }])
  );
  watcher.start();
  assert.deepEqual(started, [2500]);
  // ...so the first poll sees an identical signature and skips the rebuild.
  await tick();
  assert.equal(applied.length, 0, 'same registry should not trigger a rebuild');
  await tick();
  assert.equal(applied.length, 1, 'changed registry should trigger one rebuild');
  assert.deepEqual(applied[0].ids, ['forge', 'archive']);
  assert.match(applied[0].meta.previousSignature, /forge/);
  assert.match(applied[0].meta.nextSignature, /archive/);
  // stop() must clear the exact timer handle returned by setIntervalImpl.
  watcher.stop();
  assert.deepEqual(cleared, [99]);
});

View File

@@ -20,7 +20,6 @@ from agent.memory import (
SessionTranscript,
create_agent_memory,
)
from nexus.mempalace.conversation_artifacts import ConversationArtifact
from agent.memory_hooks import MemoryHooks
@@ -185,24 +184,6 @@ class TestAgentMemory:
doc_id = mem.write_diary()
assert doc_id is None # MemPalace unavailable
def test_remember_alexander_request_response_uses_sovereign_room(self):
    # Force the backend to report "available" without a real MemPalace.
    mem = AgentMemory(agent_name="allegro")
    mem._available = True
    with patch("nexus.mempalace.searcher.add_memory", return_value="doc-123") as add_memory:
        doc_id = mem.remember_alexander_request_response(
            request_text="Catalog my requests.",
            response_text="I will preserve them as artifacts.",
            requester="Alexander Whitestone",
            source="telegram:timmy-time",
        )
    # The stored document id is propagated back to the caller.
    assert doc_id == "doc-123"
    kwargs = add_memory.call_args.kwargs
    # Artifacts land in the dedicated "sovereign" room within the agent's wing,
    # tagged with normalized speaker ids for both parties.
    assert kwargs["room"] == "sovereign"
    assert kwargs["wing"] == mem.wing
    assert kwargs["extra_metadata"]["artifact_type"] == "alexander_request_response"
    assert kwargs["extra_metadata"]["speaker_tags"] == ["speaker:alexander", "speaker:allegro"]
# ---------------------------------------------------------------------------
# MemoryHooks tests

View File

@@ -1,58 +0,0 @@
from pathlib import Path
import yaml
from nexus.mempalace.config import CORE_ROOMS
from nexus.mempalace.conversation_artifacts import (
ConversationArtifact,
build_request_response_artifact,
extract_alexander_request_pairs,
normalize_speaker,
)
def test_sovereign_room_is_core_room() -> None:
    """The "sovereign" room must be declared both in code and in rooms.yaml."""
    assert "sovereign" in CORE_ROOMS
    declared = yaml.safe_load(Path("mempalace/rooms.yaml").read_text())
    room_keys = [room["key"] for room in declared["core_rooms"]]
    assert "sovereign" in room_keys
def test_normalize_speaker_maps_alexander_variants() -> None:
    """Aliases, odd casing, and padding all collapse to canonical speaker ids."""
    for variant in ("Alexander Whitestone", "Rockachopa", " ALEXANDER "):
        assert normalize_speaker(variant) == "alexander"
    assert normalize_speaker("Bezalel") == "bezalel"
def test_build_request_response_artifact_creates_sovereign_metadata() -> None:
    # A single request/response pair becomes one artifact routed to the
    # sovereign room with normalized speaker metadata.
    artifact = build_request_response_artifact(
        requester="Alexander Whitestone",
        responder="Allegro",
        request_text="Please organize my conversation artifacts.",
        response_text="I will catalog them under a sovereign room.",
        source="telegram:timmy-time",
        timestamp="2026-04-16T01:30:00Z",
    )
    assert isinstance(artifact, ConversationArtifact)
    assert artifact.room == "sovereign"
    # Speaker names are normalized to lowercase canonical tags.
    assert artifact.metadata["speaker_tags"] == ["speaker:alexander", "speaker:allegro"]
    assert artifact.metadata["artifact_type"] == "alexander_request_response"
    assert artifact.metadata["responder"] == "allegro"
    # The artifact body carries both halves under labeled headings.
    assert "## Alexander Request" in artifact.text
    assert "## Wizard Response" in artifact.text
def test_extract_alexander_request_pairs_finds_following_wizard_response() -> None:
    # Alternating Alexander/Allegro turns should pair up into two artifacts,
    # each request matched with the wizard response that follows it.
    turns = [
        {"speaker": "Alexander Whitestone", "text": "Catalog my requests as artifacts.", "timestamp": "2026-04-16T01:00:00Z"},
        {"speaker": "Allegro", "text": "I'll build a sovereign room contract.", "timestamp": "2026-04-16T01:01:00Z"},
        {"speaker": "Alexander", "text": "Make sure my words are easy to recall.", "timestamp": "2026-04-16T01:02:00Z"},
        {"speaker": "Allegro", "text": "I will tag them with speaker metadata.", "timestamp": "2026-04-16T01:03:00Z"},
    ]
    artifacts = extract_alexander_request_pairs(turns, responder="Allegro", source="telegram")
    assert len(artifacts) == 2
    # Metadata records both the originating request and closing response times.
    assert artifacts[0].metadata["request_timestamp"] == "2026-04-16T01:00:00Z"
    assert artifacts[1].metadata["response_timestamp"] == "2026-04-16T01:03:00Z"

View File

@@ -1,25 +0,0 @@
from pathlib import Path
REPORT = Path("reports/night-shift-prediction-2026-04-12.md")
def test_prediction_report_exists_with_required_sections():
    """The night-shift prediction report must exist and keep its key headings."""
    assert REPORT.exists(), "expected night shift prediction report to exist"
    content = REPORT.read_text()
    required_sections = (
        "# Night Shift Prediction Report — April 12-13, 2026",
        "## Starting State (11:36 PM)",
        "## Burn Loops Active (13 @ every 3 min)",
        "## Expected Outcomes by 7 AM",
        "### Risk Factors",
        "### Confidence Level",
        "This report is a prediction",
    )
    for heading in required_sections:
        assert heading in content
def test_prediction_report_preserves_core_forecast_numbers():
    """Core forecast figures must survive report edits verbatim."""
    content = REPORT.read_text()
    forecast_figures = (
        "Total expected API calls: ~2,010",
        "Total commits pushed: ~800-1,200",
        "Total PRs created: ~150-250",
        "the-nexus | 30-50 | 200-300",
        "Generated: 2026-04-12 23:36 EDT",
    )
    for figure in forecast_figures:
        assert figure in content

View File

@@ -1,14 +0,0 @@
from pathlib import Path
APP_JS = Path("app.js")
def test_app_wires_portal_registry_hot_reload_loop() -> None:
    """app.js must wire the portal-registry watcher end to end."""
    source = APP_JS.read_text()
    expected_markers = (
        "createPortalRegistryWatcher",
        "fetchPortalRegistry",
        "applyPortalRegistry(",
        "portalRegistryWatcher.start()",
        "_registry_ts",
    )
    for marker in expected_markers:
        assert marker in source

View File

@@ -1,45 +0,0 @@
from __future__ import annotations
import importlib.util
import sys
from pathlib import Path
import yaml
# Repository root, resolved relative to this test file's location.
PROJECT_ROOT = Path(__file__).parent.parent
# The script lives under scripts/ without being importable as a package,
# so load it directly from its file path.
_spec = importlib.util.spec_from_file_location(
    "sync_branch_protection_test",
    PROJECT_ROOT / "scripts" / "sync_branch_protection.py",
)
_mod = importlib.util.module_from_spec(_spec)
# Register the module before executing it (standard importlib loading recipe).
sys.modules["sync_branch_protection_test"] = _mod
_spec.loader.exec_module(_mod)
# Re-export the function under test at module scope for the tests below.
build_branch_protection_payload = _mod.build_branch_protection_payload
def test_build_branch_protection_payload_enables_rebase_before_merge():
    """Payload keys mirror the YAML rules, including the up-to-date-branch gate."""
    rules = {
        "required_approvals": 1,
        "dismiss_stale_approvals": True,
        "require_ci_to_merge": False,
        "block_deletions": True,
        "block_force_push": True,
        "block_on_outdated_branch": True,
    }
    payload = build_branch_protection_payload("main", rules)
    # Gitea names the rule after the branch it protects.
    assert payload["branch_name"] == "main"
    assert payload["rule_name"] == "main"
    assert payload["block_on_outdated_branch"] is True
    assert payload["required_approvals"] == 1
    # CI gating stays off because require_ci_to_merge is False.
    assert payload["enable_status_check"] is False
def test_the_nexus_branch_protection_config_requires_up_to_date_branch():
    # The committed YAML for the-nexus must opt in to the outdated-branch gate.
    config = yaml.safe_load((PROJECT_ROOT / ".gitea" / "branch-protection" / "the-nexus.yml").read_text())
    rules = config["rules"]
    assert rules["block_on_outdated_branch"] is True