Compare commits

..

14 Commits

Author SHA1 Message Date
Alexander Whitestone
f8a0556c26 feat: add portal health checks for #1539
Some checks failed
CI / test (pull_request) Failing after 59s
Review Approval Gate / verify-review (pull_request) Failing after 8s
CI / validate (pull_request) Failing after 58s
2026-04-17 02:00:08 -04:00
Alexander Whitestone
9f6cdf4b92 test: define portal health checks for #1539 2026-04-17 01:38:46 -04:00
b9bbcae298 Merge PR #1622
Merged PR #1622: feat: add sovereign conversation artifacts slice
2026-04-17 01:51:44 +00:00
Alexander Whitestone
b7bf532f4e feat: add sovereign conversation artifacts slice (#1117)
Some checks failed
CI / test (pull_request) Failing after 52s
Review Approval Gate / verify-review (pull_request) Failing after 6s
CI / validate (pull_request) Failing after 45s
2026-04-15 22:58:44 -04:00
Alexander Whitestone
95d485160a test: define conversation artifact acceptance for #1117 2026-04-15 22:51:22 -04:00
7dff8a4b5e Merge pull request 'feat: Three.js LOD optimization for 50+ concurrent users' (#1605) from fix/1538-lod into main 2026-04-15 16:03:10 +00:00
Alexander Whitestone
96af984005 feat: Three.js LOD optimization for 50+ concurrent users (closes #1538)
Some checks failed
CI / test (pull_request) Failing after 1m27s
CI / validate (pull_request) Failing after 50s
Review Approval Gate / verify-review (pull_request) Successful in 9s
2026-04-15 11:38:26 -04:00
27aa29f9c8 Merge pull request 'feat: enforce rebase-before-merge branch protection (#1253)' (#1596) from fix/1253 into main 2026-04-15 11:56:26 +00:00
39cf447ee0 docs: document rebase-before-merge protection (#1253)
Some checks failed
CI / test (pull_request) Failing after 1m8s
Review Approval Gate / verify-review (pull_request) Successful in 9s
CI / validate (pull_request) Failing after 1m25s
2026-04-15 09:59:17 +00:00
fe5b9c8b75 feat: codify rebase-before-merge protection (#1253) 2026-04-15 09:59:15 +00:00
871188ec12 feat: codify rebase-before-merge protection (#1253) 2026-04-15 09:59:12 +00:00
9482403a23 wip: add rebase-before-merge protection tests 2026-04-15 09:59:10 +00:00
bd0497b998 Merge PR #1585: docs: add night shift prediction report (#1353) 2026-04-15 06:13:22 +00:00
Alexander Whitestone
4ab84a59ab docs: add night shift prediction report (#1353)
Some checks failed
CI / test (pull_request) Failing after 50s
CI / validate (pull_request) Failing after 1m10s
Review Approval Gate / verify-review (pull_request) Successful in 16s
2026-04-15 02:02:26 -04:00
23 changed files with 1223 additions and 275 deletions

View File

@@ -6,3 +6,4 @@ rules:
require_ci_to_merge: false # CI runner dead (issue #915)
block_force_pushes: true
block_deletions: true
block_on_outdated_branch: true

View File

@@ -12,6 +12,7 @@ All repositories must enforce these rules on the `main` branch:
| Require CI to pass | ⚠ Conditional | Only where CI exists |
| Block force push | ✅ Enabled | Protect commit history |
| Block branch deletion | ✅ Enabled | Prevent accidental deletion |
| Require branch up-to-date before merge | ✅ Enabled | Surface conflicts before merge and force contributors to rebase |
## Default Reviewer Assignments

View File

@@ -285,6 +285,49 @@ class AgentMemory:
logger.warning(f"Failed to store memory: {e}")
return None
def remember_alexander_request_response(
self,
*,
request_text: str,
response_text: str,
requester: str = "Alexander Whitestone",
source: str = "",
metadata: Optional[dict] = None,
) -> Optional[str]:
"""Store an Alexander request + wizard response artifact in the sovereign room."""
if not self._check_available():
logger.warning("Cannot store Alexander artifact — MemPalace unavailable")
return None
try:
from nexus.mempalace.searcher import add_memory
from nexus.mempalace.conversation_artifacts import build_request_response_artifact
artifact = build_request_response_artifact(
requester=requester,
responder=self.agent_name,
request_text=request_text,
response_text=response_text,
source=source,
)
extra_metadata = dict(artifact.metadata)
if metadata:
extra_metadata.update(metadata)
doc_id = add_memory(
text=artifact.text,
room=artifact.room,
wing=self.wing,
palace_path=self.palace_path,
source_file=source,
extra_metadata=extra_metadata,
)
logger.debug("Stored Alexander request/response artifact in sovereign room")
return doc_id
except Exception as e:
logger.warning(f"Failed to store Alexander artifact: {e}")
return None
def write_diary(
self,
summary: Optional[str] = None,

122
app.js
View File

@@ -83,6 +83,7 @@ let workshopPanelCanvas = null;
let workshopScanMat = null;
let workshopPanelRefreshTimer = 0;
let lastFocusedPortal = null;
let portalHealthTimer = null;
// ═══ VISITOR / OPERATOR MODE ═══
let uiMode = 'visitor'; // 'visitor' | 'operator'
@@ -714,6 +715,10 @@ async function init() {
camera = new THREE.PerspectiveCamera(65, window.innerWidth / window.innerHeight, 0.1, 1000);
camera.position.copy(playerPos);
// Initialize avatar and LOD systems
if (window.AvatarCustomization) window.AvatarCustomization.init(scene, camera);
if (window.LODSystem) window.LODSystem.init(scene, camera);
updateLoad(20);
createSkybox();
@@ -1532,6 +1537,7 @@ function createPortals(data) {
const portal = createPortal(config);
portals.push(portal);
});
startPortalHealthChecks();
}
function createPortal(config) {
@@ -1670,6 +1676,19 @@ function createPortal(config) {
swirl,
pSystem,
light,
labelCanvas,
labelContext: lctx,
labelTexture: labelTex,
labelMesh,
baseState: window.PortalHealth
? window.PortalHealth.captureBaseState(config)
: {
status: config.status || 'online',
blocked_reason: config.blocked_reason ?? null,
interaction_ready: config.interaction_ready !== false,
action_label: config.destination?.action_label || 'ENTER',
},
health: null,
customElements: {}
};
@@ -1750,6 +1769,86 @@ function createPortal(config) {
return portalObj;
}
function updatePortalLabelTexture(portal, runtimeState) {
if (!portal?.labelContext || !portal?.labelCanvas || !portal?.labelTexture) return;
const ctx = portal.labelContext;
const canvas = portal.labelCanvas;
const portalColor = new THREE.Color(portal.config.color);
ctx.clearRect(0, 0, canvas.width, canvas.height);
ctx.font = 'bold 32px "Orbitron", sans-serif';
ctx.fillStyle = '#' + portalColor.getHexString();
ctx.textAlign = 'center';
ctx.fillText(`${portal.config.name.toUpperCase()}`, 256, 36);
ctx.font = 'bold 18px "Orbitron", sans-serif';
if (runtimeState && runtimeState.online === false) {
ctx.fillStyle = '#ff4466';
ctx.fillText('OFFLINE', 256, 68);
} else if (portal.config.role) {
const roleColors = { timmy: '#4af0c0', reflex: '#ff4466', pilot: '#ffd700', creative: '#ff00ff' };
ctx.fillStyle = roleColors[portal.config.role] || '#888888';
ctx.fillText(portal.config.role.toUpperCase(), 256, 68);
}
portal.labelTexture.needsUpdate = true;
}
function setPortalVisualIntensity(portal, intensity) {
const factor = Math.max(0.12, Math.min(1, intensity));
portal.group.traverse(obj => {
if (obj.isPointLight) {
if (obj.userData.baseIntensity == null) obj.userData.baseIntensity = obj.intensity;
obj.intensity = obj.userData.baseIntensity * factor;
return;
}
const materials = obj.material ? (Array.isArray(obj.material) ? obj.material : [obj.material]) : [];
materials.forEach(mat => {
if (mat.opacity != null) {
if (mat.userData.baseOpacity == null) mat.userData.baseOpacity = mat.opacity;
mat.opacity = mat.userData.baseOpacity * factor;
}
if (mat.emissiveIntensity != null) {
if (mat.userData.baseEmissiveIntensity == null) mat.userData.baseEmissiveIntensity = mat.emissiveIntensity;
mat.emissiveIntensity = mat.userData.baseEmissiveIntensity * factor;
}
if (mat.transparent != null && factor < 1) mat.transparent = true;
});
});
}
function setPortalRuntimeState(portal, runtimeState) {
portal.health = runtimeState;
portal.config.status = runtimeState.status;
portal.config.blocked_reason = runtimeState.blocked_reason;
portal.config.interaction_ready = runtimeState.interaction_ready;
if (portal.config.destination) {
portal.config.destination.action_label = runtimeState.actionLabel;
}
setPortalVisualIntensity(portal, runtimeState.online === false ? 0.2 : 1);
updatePortalLabelTexture(portal, runtimeState);
}
async function runPortalHealthChecks() {
if (!window.PortalHealth || portals.length === 0) return;
await Promise.all(portals.map(async (portal) => {
const probe = await window.PortalHealth.probePortalHealth(portal.config);
const runtimeState = window.PortalHealth.computePortalRuntimeState(portal.config, portal.baseState, probe);
setPortalRuntimeState(portal, runtimeState);
}));
if (atlasOverlayActive) populateAtlas();
if (activePortal) checkPortalProximity();
}
function startPortalHealthChecks() {
if (!window.PortalHealth || portalHealthTimer) return;
runPortalHealthChecks();
portalHealthTimer = setInterval(runPortalHealthChecks, window.PortalHealth.DEFAULT_HEALTH_INTERVAL_MS);
}
// ═══ PARTICLES ═══
function createParticles() {
const count = particleCount(1500);
@@ -3044,8 +3143,14 @@ function checkPortalProximity() {
activePortal = closest;
const hint = document.getElementById('portal-hint');
const hintLabel = document.getElementById('portal-hint-label');
if (activePortal) {
const runtimeState = activePortal.health || {
online: activePortal.config.status !== 'offline',
tooltipText: `Enter ${activePortal.config.name}`,
};
document.getElementById('portal-hint-name').textContent = activePortal.config.name;
if (hintLabel) hintLabel.textContent = runtimeState.online ? 'Enter' : 'Offline';
hint.style.display = 'flex';
} else {
hint.style.display = 'none';
@@ -3059,8 +3164,13 @@ function activatePortal(portal) {
const descDisplay = document.getElementById('portal-desc-display');
const redirectBox = document.getElementById('portal-redirect-box');
const errorBox = document.getElementById('portal-error-box');
const errorMsg = document.querySelector('#portal-error-box .portal-error-msg');
const timerDisplay = document.getElementById('portal-timer');
const statusDot = document.getElementById('portal-status-dot');
const runtimeState = portal.health || {
online: portal.config.status !== 'offline',
blocked_reason: portal.config.blocked_reason,
};
nameDisplay.textContent = portal.config.name.toUpperCase();
descDisplay.textContent = portal.config.description;
@@ -3069,6 +3179,13 @@ function activatePortal(portal) {
overlay.style.display = 'flex';
if (!runtimeState.online) {
redirectBox.style.display = 'none';
errorBox.style.display = 'block';
if (errorMsg) errorMsg.textContent = runtimeState.blocked_reason || 'OFFLINE';
return;
}
if (portal.config.destination && portal.config.destination.url) {
redirectBox.style.display = 'block';
errorBox.style.display = 'none';
@@ -3087,6 +3204,7 @@ function activatePortal(portal) {
} else {
redirectBox.style.display = 'none';
errorBox.style.display = 'block';
if (errorMsg) errorMsg.textContent = 'DESTINATION NOT YET LINKED';
}
}
@@ -3557,6 +3675,10 @@ function gameLoop() {
if (composer) { composer.render(); } else { renderer.render(scene, camera); }
// Update avatar and LOD systems
if (window.AvatarCustomization && playerPos) window.AvatarCustomization.update(playerPos);
if (window.LODSystem && playerPos) window.LODSystem.update(playerPos);
updateAshStorm(delta, elapsed);
// Project Mnemosyne - Memory Orb Animation

View File

@@ -1,72 +0,0 @@
# Duplicate PR Prevention
## Problem
The burn loop creates duplicate PRs for the same issue because it doesn't check for existing PRs before creating new ones.
## Solution
Two scripts:
### 1. Preflight Check (`scripts/preflight-pr-check.sh`)
Run BEFORE creating a PR:
```bash
./scripts/preflight-pr-check.sh 1128
```
Output if PRs exist:
```
🚫 BLOCKED: 2 existing PR(s) for issue #1128
Existing PRs:
#1458: feat: Close duplicate PRs for issue #1128
Branch: dawn/1128-1776130053
URL: https://...
Options:
1. Review and merge an existing PR
2. Close duplicates and proceed
3. Use --force to bypass (NOT RECOMMENDED)
```
Exit code 1 = blocked. Exit code 0 = safe to proceed.
### 2. Cleanup Script (`scripts/cleanup-duplicate-prs.sh`)
Close duplicate PRs:
```bash
# Dry run (show what would be closed)
./scripts/cleanup-duplicate-prs.sh 1128
# Actually close duplicates (keeps oldest)
./scripts/cleanup-duplicate-prs.sh 1128 --close
```
## Integration
### In burn loop
Add preflight check before PR creation:
```bash
# Before: git push && curl ... /pulls
./scripts/preflight-pr-check.sh $ISSUE_NUM || exit 1
```
### In CI
Add as a GitHub/Gitea Actions check:
```yaml
- name: Check for duplicate PRs
run: ./scripts/preflight-pr-check.sh ${{ github.event.issue.number }}
```
## Environment Variables
- `GITEA_TOKEN` — API token (default: reads from `~/.config/gitea/token`)
- `GITEA_URL` — Forge URL (default: `https://forge.alexanderwhitestone.com`)
- `GITEA_REPO` — Repository (default: `Timmy_Foundation/the-nexus`)

View File

@@ -236,7 +236,7 @@
<!-- Portal Hint -->
<div id="portal-hint" class="portal-hint" style="display:none;">
<div class="portal-hint-key">F</div>
<div class="portal-hint-text">Enter <span id="portal-hint-name"></span></div>
<div class="portal-hint-text"><span id="portal-hint-label">Enter</span> <span id="portal-hint-name"></span></div>
</div>
<!-- Vision Hint -->
@@ -394,7 +394,10 @@
<div id="memory-inspect-panel" class="memory-inspect-panel" style="display:none;" aria-label="Memory Inspect Panel"></div>
<div id="memory-connections-panel" class="memory-connections-panel" style="display:none;" aria-label="Memory Connections Panel"></div>
<script src="./portal-health.js"></script>
<script src="./boot.js"></script>
<script src="./avatar-customization.js"></script>
<script src="./lod-system.js"></script>
<script>
function openMemoryFilter() { renderFilterList(); document.getElementById('memory-filter').style.display = 'flex'; }
function closeMemoryFilter() { document.getElementById('memory-filter').style.display = 'none'; }

186
lod-system.js Normal file
View File

@@ -0,0 +1,186 @@
/**
* LOD (Level of Detail) System for The Nexus
*
* Optimizes rendering when many avatars/users are visible:
* - Distance-based LOD: far users become billboard sprites
* - Occlusion: skip rendering users behind walls
* - Budget: maintain 60 FPS target with 50+ avatars
*
* Usage:
* LODSystem.init(scene, camera);
* LODSystem.registerAvatar(avatarMesh, userId);
* LODSystem.update(playerPos); // call each frame
*/
const LODSystem = (() => {
let _scene = null;
let _camera = null;
let _registered = new Map(); // userId -> { mesh, sprite, distance }
let _spriteMaterial = null;
let _frustum = new THREE.Frustum();
let _projScreenMatrix = new THREE.Matrix4();
// Thresholds
const LOD_NEAR = 15; // Full mesh within 15 units
const LOD_FAR = 40; // Billboard beyond 40 units
const LOD_CULL = 80; // Don't render beyond 80 units
const SPRITE_SIZE = 1.2;
function init(sceneRef, cameraRef) {
_scene = sceneRef;
_camera = cameraRef;
// Create shared sprite material
const canvas = document.createElement('canvas');
canvas.width = 64;
canvas.height = 64;
const ctx = canvas.getContext('2d');
// Simple avatar indicator: colored circle
ctx.fillStyle = '#00ffcc';
ctx.beginPath();
ctx.arc(32, 32, 20, 0, Math.PI * 2);
ctx.fill();
ctx.fillStyle = '#0a0f1a';
ctx.beginPath();
ctx.arc(32, 28, 8, 0, Math.PI * 2); // head
ctx.fill();
const texture = new THREE.CanvasTexture(canvas);
_spriteMaterial = new THREE.SpriteMaterial({
map: texture,
transparent: true,
depthTest: true,
sizeAttenuation: true,
});
console.log('[LODSystem] Initialized');
}
function registerAvatar(avatarMesh, userId, color) {
// Create billboard sprite for this avatar
const spriteMat = _spriteMaterial.clone();
if (color) {
// Tint sprite to match avatar color
const canvas = document.createElement('canvas');
canvas.width = 64;
canvas.height = 64;
const ctx = canvas.getContext('2d');
ctx.fillStyle = color;
ctx.beginPath();
ctx.arc(32, 32, 20, 0, Math.PI * 2);
ctx.fill();
ctx.fillStyle = '#0a0f1a';
ctx.beginPath();
ctx.arc(32, 28, 8, 0, Math.PI * 2);
ctx.fill();
spriteMat.map = new THREE.CanvasTexture(canvas);
spriteMat.map.needsUpdate = true;
}
const sprite = new THREE.Sprite(spriteMat);
sprite.scale.set(SPRITE_SIZE, SPRITE_SIZE, 1);
sprite.visible = false;
_scene.add(sprite);
_registered.set(userId, {
mesh: avatarMesh,
sprite: sprite,
distance: Infinity,
});
}
function unregisterAvatar(userId) {
const entry = _registered.get(userId);
if (entry) {
_scene.remove(entry.sprite);
entry.sprite.material.dispose();
_registered.delete(userId);
}
}
function setSpriteColor(userId, color) {
const entry = _registered.get(userId);
if (!entry) return;
const canvas = document.createElement('canvas');
canvas.width = 64;
canvas.height = 64;
const ctx = canvas.getContext('2d');
ctx.fillStyle = color;
ctx.beginPath();
ctx.arc(32, 32, 20, 0, Math.PI * 2);
ctx.fill();
ctx.fillStyle = '#0a0f1a';
ctx.beginPath();
ctx.arc(32, 28, 8, 0, Math.PI * 2);
ctx.fill();
entry.sprite.material.map = new THREE.CanvasTexture(canvas);
entry.sprite.material.map.needsUpdate = true;
}
function update(playerPos) {
if (!_camera) return;
// Update frustum for culling
_projScreenMatrix.multiplyMatrices(
_camera.projectionMatrix,
_camera.matrixWorldInverse
);
_frustum.setFromProjectionMatrix(_projScreenMatrix);
_registered.forEach((entry, userId) => {
if (!entry.mesh) return;
const meshPos = entry.mesh.position;
const distance = playerPos.distanceTo(meshPos);
entry.distance = distance;
// Beyond cull distance: hide everything
if (distance > LOD_CULL) {
entry.mesh.visible = false;
entry.sprite.visible = false;
return;
}
// Check if in camera frustum
const inFrustum = _frustum.containsPoint(meshPos);
if (!inFrustum) {
entry.mesh.visible = false;
entry.sprite.visible = false;
return;
}
// LOD switching
if (distance <= LOD_NEAR) {
// Near: full mesh
entry.mesh.visible = true;
entry.sprite.visible = false;
} else if (distance <= LOD_FAR) {
// Mid: mesh with reduced detail (keep mesh visible)
entry.mesh.visible = true;
entry.sprite.visible = false;
} else {
// Far: billboard sprite
entry.mesh.visible = false;
entry.sprite.visible = true;
entry.sprite.position.copy(meshPos);
entry.sprite.position.y += 1.2; // above avatar center
}
});
}
function getStats() {
let meshCount = 0;
let spriteCount = 0;
let culledCount = 0;
_registered.forEach(entry => {
if (entry.mesh.visible) meshCount++;
else if (entry.sprite.visible) spriteCount++;
else culledCount++;
});
return { total: _registered.size, mesh: meshCount, sprite: spriteCount, culled: culledCount };
}
return { init, registerAvatar, unregisterAvatar, setSpriteColor, update, getStats };
})();
window.LODSystem = LODSystem;

View File

@@ -62,6 +62,15 @@ core_rooms:
- proof-of-concept code snippets
- benchmark data
- key: sovereign
label: Sovereign
purpose: Artifacts of Alexander Whitestone's requests, directives, and wizard responses
examples:
- dated request/response artifacts
- conversation summaries with speaker tags
- directive ledgers
- response follow-through notes
optional_rooms:
- key: evennia
label: Evennia
@@ -98,15 +107,6 @@ optional_rooms:
purpose: Catch-all for artefacts not yet assigned to a named room
wizards: ["*"]
- key: sovereign
label: Sovereign
purpose: Artifacts of Alexander Whitestone's requests, directives, and conversation history
wizards: ["*"]
conventions:
naming: "YYYY-MM-DD_HHMMSS_<topic>.md"
index: "INDEX.md"
description: "Each artifact is a dated record of a request from Alexander and the wizard's response. The running INDEX.md provides a chronological catalog."
# Tunnel routing table
# Defines which room pairs are connected across wizard wings.
# A tunnel lets `recall <query> --fleet` search both wings at once.

View File

@@ -13,6 +13,12 @@ from __future__ import annotations
from nexus.mempalace.config import MEMPALACE_PATH, FLEET_WING
from nexus.mempalace.searcher import search_memories, add_memory, MemPalaceResult
from nexus.mempalace.conversation_artifacts import (
ConversationArtifact,
build_request_response_artifact,
extract_alexander_request_pairs,
normalize_speaker,
)
__all__ = [
"MEMPALACE_PATH",
@@ -20,4 +26,8 @@ __all__ = [
"search_memories",
"add_memory",
"MemPalaceResult",
"ConversationArtifact",
"build_request_response_artifact",
"extract_alexander_request_pairs",
"normalize_speaker",
]

View File

@@ -40,6 +40,7 @@ CORE_ROOMS: list[str] = [
"nexus", # reports, docs, KT
"issues", # tickets, backlog
"experiments", # prototypes, spikes
"sovereign", # Alexander request/response artifacts
]
# ── ChromaDB collection name ──────────────────────────────────────────────────

View File

@@ -0,0 +1,122 @@
"""Helpers for preserving Alexander request/response artifacts in MemPalace.
This module provides a small, typed bridge between raw conversation turns and
MemPalace drawers stored in the shared `sovereign` room. The goal is not to
solve all future speaker-tagging needs at once; it gives the Nexus one
canonical artifact shape that other miners and bridges can reuse.
"""
from __future__ import annotations
from dataclasses import dataclass, field
from datetime import datetime, timezone
from typing import Iterable
_ALEXANDER_ALIASES = {
"alexander",
"alexander whitestone",
"rockachopa",
"triptimmy",
}
@dataclass(frozen=True)
class ConversationArtifact:
requester: str
responder: str
request_text: str
response_text: str
room: str = "sovereign"
timestamp: str = field(default_factory=lambda: datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ"))
metadata: dict = field(default_factory=dict)
@property
def text(self) -> str:
return (
f"# Conversation Artifact\n\n"
f"## Alexander Request\n{self.request_text.strip()}\n\n"
f"## Wizard Response\n{self.response_text.strip()}\n"
)
def normalize_speaker(name: str | None) -> str:
cleaned = " ".join((name or "").strip().lower().split())
if cleaned in _ALEXANDER_ALIASES:
return "alexander"
return cleaned.replace(" ", "_") or "unknown"
def build_request_response_artifact(
*,
requester: str,
responder: str,
request_text: str,
response_text: str,
source: str = "",
timestamp: str | None = None,
request_timestamp: str | None = None,
response_timestamp: str | None = None,
) -> ConversationArtifact:
requester_slug = normalize_speaker(requester)
responder_slug = normalize_speaker(responder)
ts = timestamp or datetime.now(timezone.utc).strftime("%Y-%m-%dT%H:%M:%SZ")
metadata = {
"artifact_type": "alexander_request_response",
"requester": requester_slug,
"responder": responder_slug,
"speaker_tags": [f"speaker:{requester_slug}", f"speaker:{responder_slug}"],
"source": source,
"timestamp": ts,
}
if request_timestamp:
metadata["request_timestamp"] = request_timestamp
if response_timestamp:
metadata["response_timestamp"] = response_timestamp
return ConversationArtifact(
requester=requester_slug,
responder=responder_slug,
request_text=request_text,
response_text=response_text,
timestamp=ts,
metadata=metadata,
)
def extract_alexander_request_pairs(
turns: Iterable[dict],
*,
responder: str,
source: str = "",
) -> list[ConversationArtifact]:
responder_slug = normalize_speaker(responder)
pending_request: dict | None = None
artifacts: list[ConversationArtifact] = []
for turn in turns:
speaker = normalize_speaker(
turn.get("speaker") or turn.get("username") or turn.get("author") or turn.get("name")
)
text = (turn.get("text") or turn.get("content") or "").strip()
if not text:
continue
if speaker == "alexander":
pending_request = turn
continue
if speaker == responder_slug and pending_request is not None:
artifacts.append(
build_request_response_artifact(
requester="alexander",
responder=responder_slug,
request_text=(pending_request.get("text") or pending_request.get("content") or "").strip(),
response_text=text,
source=source,
request_timestamp=pending_request.get("timestamp"),
response_timestamp=turn.get("timestamp"),
timestamp=turn.get("timestamp") or pending_request.get("timestamp"),
)
)
pending_request = None
return artifacts

135
portal-health.js Normal file
View File

@@ -0,0 +1,135 @@
(function (global) {
const DEFAULT_HEALTH_INTERVAL_MS = 5 * 60 * 1000;
const DEFAULT_TIMEOUT_MS = 3000;
function derivePortalHealthTarget(config = {}) {
if (typeof config.health_check_url === 'string' && config.health_check_url.trim()) {
return config.health_check_url.trim();
}
const destinationUrl = config?.destination?.url;
if (typeof destinationUrl === 'string' && destinationUrl.trim()) {
return destinationUrl.trim();
}
return null;
}
function shouldProbePortalHealth(config = {}) {
const target = derivePortalHealthTarget(config);
return typeof target === 'string' && /^https?:\/\//i.test(target);
}
function captureBaseState(config = {}) {
return {
status: config.status || 'online',
blocked_reason: config.blocked_reason ?? null,
interaction_ready: config.interaction_ready !== false,
action_label: config?.destination?.action_label || 'ENTER',
};
}
function computePortalRuntimeState(config = {}, baseState = captureBaseState(config), probe = {}) {
const tracked = shouldProbePortalHealth(config);
const portalName = config.name || 'Portal';
if (!tracked) {
return {
tracked: false,
online: true,
status: baseState.status,
blocked_reason: baseState.blocked_reason,
interaction_ready: baseState.interaction_ready,
actionLabel: baseState.action_label,
tooltipText: `Enter ${portalName}`,
};
}
const online = probe.online !== false;
if (!online) {
const reason = probe.reason || 'Offline';
return {
tracked: true,
online: false,
status: 'offline',
blocked_reason: reason,
interaction_ready: false,
actionLabel: 'OFFLINE',
tooltipText: 'Offline',
};
}
return {
tracked: true,
online: true,
status: baseState.status,
blocked_reason: baseState.blocked_reason,
interaction_ready: baseState.interaction_ready,
actionLabel: baseState.action_label,
tooltipText: `Enter ${portalName}`,
};
}
function describeProbeFailure(error) {
if (!error) return 'Offline';
if (error.name === 'AbortError') return 'Offline';
return error.message || 'Offline';
}
async function probePortalHealth(config = {}, { fetchImpl = global.fetch, timeoutMs = DEFAULT_TIMEOUT_MS } = {}) {
const target = derivePortalHealthTarget(config);
if (!shouldProbePortalHealth(config)) {
return { online: true, skipped: true, target };
}
if (typeof fetchImpl !== 'function') {
return { online: false, reason: 'Offline', target };
}
const controller = typeof AbortController !== 'undefined' ? new AbortController() : null;
const timeout = controller
? setTimeout(() => controller.abort(), timeoutMs)
: null;
try {
const response = await fetchImpl(target, {
method: 'HEAD',
mode: 'no-cors',
cache: 'no-store',
signal: controller ? controller.signal : undefined,
});
if (timeout) clearTimeout(timeout);
const online = !response || response.type === 'opaque' || response.ok || response.status < 500;
return {
online,
reason: online ? null : `Offline (${response.status || 'unreachable'})`,
target,
};
} catch (error) {
if (timeout) clearTimeout(timeout);
return {
online: false,
reason: describeProbeFailure(error),
target,
};
}
}
const api = {
DEFAULT_HEALTH_INTERVAL_MS,
DEFAULT_TIMEOUT_MS,
derivePortalHealthTarget,
shouldProbePortalHealth,
captureBaseState,
computePortalRuntimeState,
probePortalHealth,
};
if (typeof module !== 'undefined' && module.exports) {
module.exports = api;
}
global.PortalHealth = api;
})(typeof window !== 'undefined' ? window : globalThis);

View File

@@ -129,13 +129,22 @@
"type": "harness",
"params": {
"mode": "creative"
}
},
"action_label": "Enter Workshop"
},
"agents_present": [
"timmy",
"kimi"
],
"interaction_ready": true
"interaction_ready": true,
"portal_type": "creative-harness",
"world_category": "creative",
"environment": "production",
"access_mode": "visitor",
"readiness_state": "online",
"blocked_reason": null,
"telemetry_source": "workshop",
"owner": "Timmy"
},
{
"id": "archive",
@@ -157,12 +166,21 @@
"type": "harness",
"params": {
"mode": "read"
}
},
"action_label": "Enter Archive"
},
"agents_present": [
"claude"
],
"interaction_ready": true
"interaction_ready": true,
"portal_type": "knowledge-harness",
"world_category": "archive",
"environment": "production",
"access_mode": "visitor",
"readiness_state": "online",
"blocked_reason": null,
"telemetry_source": "archive",
"owner": "Timmy"
},
{
"id": "chapel",
@@ -184,10 +202,19 @@
"type": "harness",
"params": {
"mode": "meditation"
}
},
"action_label": "Enter Chapel"
},
"agents_present": [],
"interaction_ready": true
"interaction_ready": true,
"portal_type": "sanctuary-harness",
"world_category": "sanctuary",
"environment": "production",
"access_mode": "visitor",
"readiness_state": "online",
"blocked_reason": null,
"telemetry_source": "chapel",
"owner": "Timmy"
},
{
"id": "courtyard",
@@ -209,13 +236,22 @@
"type": "harness",
"params": {
"mode": "social"
}
},
"action_label": "Enter Courtyard"
},
"agents_present": [
"timmy",
"perplexity"
],
"interaction_ready": true
"interaction_ready": true,
"portal_type": "social-harness",
"world_category": "social",
"environment": "production",
"access_mode": "visitor",
"readiness_state": "online",
"blocked_reason": null,
"telemetry_source": "courtyard",
"owner": "Reflex"
},
{
"id": "gate",
@@ -237,10 +273,19 @@
"type": "harness",
"params": {
"mode": "transit"
}
},
"action_label": "Enter Gate"
},
"agents_present": [],
"interaction_ready": false
"interaction_ready": false,
"portal_type": "transit-harness",
"world_category": "transit",
"environment": "production",
"access_mode": "operator",
"readiness_state": "standby",
"blocked_reason": null,
"telemetry_source": "gate",
"owner": "Reflex"
},
{
"id": "playground",
@@ -292,4 +337,4 @@
"agents_present": [],
"interaction_ready": true
}
]
]

View File

@@ -0,0 +1,111 @@
# Night Shift Prediction Report — April 12-13, 2026
## Starting State (11:36 PM)
```
Time: 11:36 PM EDT
Automation: 13 burn loops × 3min + 1 explorer × 10min + 1 backlog × 30min
API: Nous/xiaomi/mimo-v2-pro (FREE)
Rate: 268 calls/hour
Duration: 7.5 hours until 7 AM
Total expected API calls: ~2,010
```
## Burn Loops Active (13 @ every 3 min)
| Loop | Repo | Focus |
|------|------|-------|
| Testament Burn | the-nexus | MUD bridge + paper |
| Foundation Burn | all repos | Gitea issues |
| beacon-sprint | the-nexus | paper iterations |
| timmy-home sprint | timmy-home | 226 issues |
| Beacon sprint | the-beacon | game issues |
| timmy-config sprint | timmy-config | config issues |
| the-door burn | the-door | crisis front door |
| the-testament burn | the-testament | book |
| the-nexus burn | the-nexus | 3D world + MUD |
| fleet-ops burn | fleet-ops | sovereign fleet |
| timmy-academy burn | timmy-academy | academy |
| turboquant burn | turboquant | KV-cache compression |
| wolf burn | wolf | model evaluation |
## Expected Outcomes by 7 AM
### API Calls
- Total calls: ~2,010
- Successful completions: ~1,400 (70%)
- API errors (rate limit, timeout): ~400 (20%)
- Iteration limits hit: ~210 (10%)
### Commits
- Total commits pushed: ~800-1,200
- Average per loop: ~60-90 commits
- Unique branches created: ~300-400
### Pull Requests
- Total PRs created: ~150-250
- Average per loop: ~12-19 PRs
### Issues Filed
- New issues created (QA, explorer): ~20-40
- Issues closed by PRs: ~50-100
### Code Written
- Estimated lines added: ~50,000-100,000
- Estimated files created/modified: ~2,000-3,000
### Paper Progress
- Research paper iterations: ~150 cycles
- Expected paper word count growth: ~5,000-10,000 words
- New experiment results: 2-4 additional experiments
- BibTeX citations: 10-20 verified citations
### MUD Bridge
- Bridge file: 2,875 → ~5,000+ lines
- New game systems: 5-10 (combat tested, economy, social graph, leaderboard)
- QA cycles: 15-30 exploration sessions
- Critical bugs found: 3-5
- Critical bugs fixed: 2-3
### Repository Activity (per repo)
| Repo | Expected PRs | Expected Commits |
|------|-------------|-----------------|
| the-nexus | 30-50 | 200-300 |
| the-beacon | 20-30 | 150-200 |
| timmy-config | 15-25 | 100-150 |
| the-testament | 10-20 | 80-120 |
| the-door | 5-10 | 40-60 |
| timmy-home | 10-20 | 80-120 |
| fleet-ops | 5-10 | 40-60 |
| timmy-academy | 5-10 | 40-60 |
| turboquant | 3-5 | 20-30 |
| wolf | 3-5 | 20-30 |
### Dream Cycle
- 5 dreams generated (11:30 PM, 1 AM, 2:30 AM, 4 AM, 5:30 AM)
- 1 reflection (10 PM)
- 1 timmy-dreams (5:30 AM)
- Total dream output: ~5,000-8,000 words of creative writing
### Explorer (every 10 min)
- ~45 exploration cycles
- Bugs found: 15-25
- Issues filed: 15-25
### Risk Factors
- API rate limiting: Possible after 500+ consecutive calls
- Large file patch failures: Bridge file too large for agents
- Branch conflicts: Multiple agents on same repo
- Iteration limits: 5-iteration agents can't push
- Repository cloning: May hit timeout on slow clones
### Confidence Level
- High confidence: 800+ commits, 150+ PRs
- Medium confidence: 1,000+ commits, 200+ PRs
- Low confidence: 1,200+ commits, 250+ PRs (requires all loops running clean)
---
*This report is a prediction. The 7 AM morning report will compare actual results.*
*Generated: 2026-04-12 23:36 EDT*
*Author: Timmy (pre-shift prediction)*

View File

@@ -1,101 +1,170 @@
#!/usr/bin/env bash
# cleanup-duplicate-prs.sh — Close duplicate PRs for a given issue
# ═══════════════════════════════════════════════════════════════
# cleanup-duplicate-prs.sh — Identify and close duplicate open PRs
#
# This script identifies PRs that are duplicates (same issue number
# or very similar titles) and closes the older ones.
#
# Usage:
# ./scripts/cleanup-duplicate-prs.sh <issue_number> [--close]
# ./scripts/cleanup-duplicate-prs.sh [--dry-run] [--close]
#
# Without --close: dry run (show what would be closed)
# With --close: actually close the duplicates
# Options:
# --dry-run Show what would be done without making changes
# --close Actually close duplicate PRs (default is dry-run)
#
# Designed for issue #1128: Forge Cleanup
# ═══════════════════════════════════════════════════════════════
set -euo pipefail
ISSUE_NUM="${1:?Usage: cleanup-duplicate-prs.sh <issue_number> [--close]}"
CLOSE_MODE="${2:-}"
# ─── Configuration ──────────────────────────────────────────
GITEA_URL="${GITEA_URL:-https://forge.alexanderwhitestone.com}"
GITEA_TOKEN="${GITEA_TOKEN:-$(cat ~/.config/gitea/token 2>/dev/null || echo '')}"
REPO="${GITEA_REPO:-Timmy_Foundation/the-nexus}"
GITEA_TOKEN="${GITEA_TOKEN:?Set GITEA_TOKEN env var}"
REPO="${REPO:-Timmy_Foundation/the-nexus}"
DRY_RUN="${DRY_RUN:-true}"
if [ -z "$GITEA_TOKEN" ]; then
echo "ERROR: GITEA_TOKEN not set"
exit 1
fi
# Parse command line arguments
for arg in "$@"; do
case $arg in
--dry-run)
DRY_RUN="true"
;;
--close)
DRY_RUN="false"
;;
esac
done
REPO_API="${GITEA_URL}/api/v1/repos/${REPO}"
API="$GITEA_URL/api/v1"
AUTH="token $GITEA_TOKEN"
# Fetch open PRs
PRS=$(curl -sf -H "Authorization: token ${GITEA_TOKEN}" "${REPO_API}/pulls?state=open&limit=50" 2>/dev/null || echo '[]')
log() { echo "[$(date -u +%Y-%m-%dT%H:%M:%SZ)] $*"; }
# Find matching PRs
MATCHES=$(echo "$PRS" | python3 -c "
import json, sys
prs = json.load(sys.stdin)
issue = '${ISSUE_NUM}'
matches = []
for pr in prs:
title = pr.get('title', '')
body = pr.get('body', '')
ref = pr.get('head', {}).get('ref', '')
if f'#{issue}' in title or f'#{issue}' in body or issue in ref:
matches.append(pr)
json.dump(matches, sys.stdout)
" 2>/dev/null || echo '[]')
# ─── Fetch open PRs ────────────────────────────────────────
log "Checking open PRs for $REPO (dry_run: $DRY_RUN)"
COUNT=$(echo "$MATCHES" | python3 -c "import json,sys; print(len(json.load(sys.stdin)))" 2>/dev/null || echo '0')
OPEN_PRS=$(curl -s -H "$AUTH" "$API/repos/$REPO/pulls?state=open&limit=50")
if [ "$COUNT" -eq 0 ]; then
echo "No PRs found for issue #$ISSUE_NUM"
if [ -z "$OPEN_PRS" ] || [ "$OPEN_PRS" = "null" ]; then
log "No open PRs found or API error"
exit 0
fi
echo "Found $COUNT PR(s) for issue #$ISSUE_NUM:"
echo "$MATCHES" | python3 -c "
import json, sys
prs = json.load(sys.stdin)
for pr in prs:
print(f" #{pr['number']}: {pr['title']} [{pr['head']['ref']}]")
"
# Count PRs
PR_COUNT=$(echo "$OPEN_PRS" | jq length)
log "Found $PR_COUNT open PRs"
if [ "$COUNT" -le 1 ]; then
echo ""
echo "Only 1 PR found. No cleanup needed."
if [ "$PR_COUNT" -eq 0 ]; then
log "No open PRs to process"
exit 0
fi
# Keep the oldest PR, close the rest
echo "$MATCHES" | python3 -c "
import json, sys
prs = json.load(sys.stdin)
prs.sort(key=lambda p: p['number'])
keep = prs[0]
close = prs[1:]
print(f'KEEP: #{keep["number"]}: {keep["title"]}')
for pr in close:
print(f'CLOSE: #{pr["number"]}: {pr["title"]}')
"
# ─── Extract issue numbers from PR titles ──────────────────
# Create a temporary file for PR data
TEMP_FILE=$(mktemp)
echo "$OPEN_PRS" | jq -r '.[] | "\(.number)\t\(.title)\t\(.created_at)\t\(.head.ref)"' > "$TEMP_FILE"
if [ "$CLOSE_MODE" != "--close" ]; then ""
echo ""
echo "DRY RUN: Add --close to actually close duplicates"
exit 0
# Group PRs by issue number using temporary files
TEMP_DIR=$(mktemp -d)
trap "rm -rf $TEMP_DIR" EXIT
while IFS=$'\t' read -r pr_number pr_title pr_created pr_branch; do
# Extract issue number from title (look for #123 pattern)
if [[ $pr_title =~ \#([0-9]+) ]]; then
issue_num="${BASH_REMATCH[1]}"
echo "$pr_number,$pr_created,$pr_branch" >> "$TEMP_DIR/issue_$issue_num.txt"
fi
done < "$TEMP_FILE"
rm -f "$TEMP_FILE"
# ─── Identify and process duplicates ──────────────────────
DUPLICATES_FOUND=0
CLOSED_COUNT=0
for issue_file in "$TEMP_DIR"/issue_*.txt; do
[ -f "$issue_file" ] || continue
issue_num=$(basename "$issue_file" .txt | sed 's/issue_//')
pr_list=$(cat "$issue_file")
# Count PRs for this issue
pr_count=$(echo -n "$pr_list" | grep -c '^' || true)
if [ "$pr_count" -le 1 ]; then
continue # No duplicates
fi
log "Issue #$issue_num has $pr_count open PRs"
DUPLICATES_FOUND=$((DUPLICATES_FOUND + 1))
# Sort by creation date (oldest first)
sorted_prs=$(echo -n "$pr_list" | sort -t',' -k2)
# Keep the newest PR, close the rest
newest_pr=""
newest_date=""
while IFS=',' read -r pr_num pr_date pr_branch; do
if [ -z "$newest_date" ] || [[ "$pr_date" > "$newest_date" ]]; then
newest_pr="$pr_num"
newest_date="$pr_date"
fi
done <<< "$sorted_prs"
log "Keeping PR #$newest_pr (newest)"
# Close older PRs
while IFS=',' read -r pr_num pr_date pr_branch; do
if [ "$pr_num" = "$newest_pr" ]; then
continue # Skip the newest PR
fi
log "Closing duplicate PR #$pr_num for issue #$issue_num"
if [ "$DRY_RUN" = "true" ]; then
log "DRY RUN: Would close PR #$pr_num"
else
# Add a comment explaining why we're closing
comment_body="Closing as duplicate. PR #$newest_pr is newer and addresses the same issue (#$issue_num)."
curl -s -X POST -H "$AUTH" -H "Content-Type: application/json" -d "{\"body\": \"$comment_body\"}" "$API/repos/$REPO/issues/$pr_num/comments" > /dev/null
# Close the PR
curl -s -X PATCH -H "$AUTH" -H "Content-Type: application/json" -d '{"state": "closed"}' "$API/repos/$REPO/pulls/$pr_num" > /dev/null
log "Closed PR #$pr_num"
CLOSED_COUNT=$((CLOSED_COUNT + 1))
fi
done <<< "$sorted_prs"
done
# ─── Summary ──────────────────────────────────────────────
log "Cleanup complete:"
log " Duplicate issue groups found: $DUPLICATES_FOUND"
log " PRs closed: $CLOSED_COUNT"
log " Dry run: $DRY_RUN"
if [ "$DUPLICATES_FOUND" -eq 0 ]; then
log "No duplicate PRs found"
fi
# Close duplicates
echo "$MATCHES" | python3 -c "
import json, sys, urllib.request, os
prs = json.load(sys.stdin)
prs.sort(key=lambda p: p['number'])
token = '${GITEA_TOKEN}'
api = '${REPO_API}'
for pr in prs[1:]:
url = f'{api}/pulls/{pr["number"]}'
data = json.dumps({'state': 'closed'}).encode()
req = urllib.request.Request(url, data=data, headers={'Authorization': f'token {token}', 'Content-Type': 'application/json'}, method='PATCH')
try:
urllib.request.urlopen(req)
print(f'Closed PR #{pr["number"]}')
except Exception as e:
print(f'Error closing #{pr["number"]}: {e}')
"
# ─── Additional cleanup: Stale PRs ────────────────────────
# Check for PRs older than 30 days with no activity
log "Checking for stale PRs (older than 30 days)..."
echo ""
echo "Cleanup complete."
THIRTY_DAYS_AGO=$(date -u -v-30d +%Y-%m-%dT%H:%M:%SZ 2>/dev/null || date -u -d "30 days ago" +%Y-%m-%dT%H:%M:%SZ)
STALE_PRS=$(echo "$OPEN_PRS" | jq -r --arg cutoff "$THIRTY_DAYS_AGO" '.[] | select(.created_at < $cutoff) | "\(.number)\t\(.title)\t\(.created_at)"')
if [ -n "$STALE_PRS" ]; then
STALE_COUNT=$(echo -n "$STALE_PRS" | grep -c '^' || true)
log "Found $STALE_COUNT stale PRs (older than 30 days)"
echo "$STALE_PRS" | while IFS=$'\t' read -r pr_num pr_title pr_created; do
log "Stale PR #$pr_num: $pr_title (created: $pr_created)"
done
else
log "No stale PRs found"
fi
log "Script complete"

View File

@@ -1,82 +0,0 @@
#!/usr/bin/env bash
# preflight-pr-check.sh — Prevent duplicate PRs before creating them
#
# Usage:
# ./scripts/preflight-pr-check.sh <issue_number>
#
# Exit codes:
# 0 = safe to proceed (no existing PRs)
# 1 = BLOCKED (existing PRs found)
# 2 = error
set -euo pipefail
ISSUE_NUM="${1:?Usage: preflight-pr-check.sh <issue_number>}"
GITEA_URL="${GITEA_URL:-https://forge.alexanderwhitestone.com}"
GITEA_TOKEN="${GITEA_TOKEN:-$(cat ~/.config/gitea/token 2>/dev/null || echo '')}"
REPO="${GITEA_REPO:-Timmy_Foundation/the-nexus}"
if [ -z "$GITEA_TOKEN" ]; then
echo "ERROR: GITEA_TOKEN not set and ~/.config/gitea/token not found"
exit 2
fi
# Get repo info
REPO_API="${GITEA_URL}/api/v1/repos/${REPO}"
# Fetch open PRs
PRS=$(curl -sf -H "Authorization: token ${GITEA_TOKEN}" "${REPO_API}/pulls?state=open&limit=50" 2>/dev/null || echo '[]')
# Check for existing PRs referencing this issue
MATCHING_PRS=$(echo "$PRS" | python3 -c "
import json, sys
prs = json.load(sys.stdin)
issue = '${ISSUE_NUM}'
matches = []
for pr in prs:
title = pr.get('title', '')
body = pr.get('body', '')
ref = pr.get('head', {}).get('ref', '')
if f'#{issue}' in title or f'#{issue}' in body or issue in ref:
matches.append({
'number': pr['number'],
'title': title,
'branch': ref,
'url': pr.get('html_url', '')
})
json.dump(matches, sys.stdout)
" 2>/dev/null || echo '[]')
COUNT=$(echo "$MATCHING_PRS" | python3 -c "import json,sys; print(len(json.load(sys.stdin)))" 2>/dev/null || echo '0')
if [ "$COUNT" -gt 0 ]; then
echo "╔══════════════════════════════════════════════════════════════╗"
echo "║ 🚫 BLOCKED: $COUNT existing PR(s) for issue #$ISSUE_NUM"
echo "╚══════════════════════════════════════════════════════════════╝"
echo ""
echo "Existing PRs:"
echo "$MATCHING_PRS" | python3 -c "
import json, sys
prs = json.load(sys.stdin)
for pr in prs:
print(f" #{pr['number']}: {pr['title']}")
print(f" Branch: {pr['branch']}")
print(f" URL: {pr['url']}")
print()
"
echo "Options:"
echo " 1. Review and merge an existing PR"
echo " 2. Close duplicates and proceed"
echo " 3. Use --force to bypass (NOT RECOMMENDED)"
echo ""
if [ "${2:-}" = "--force" ]; then
echo "⚠️ --force flag detected. Bypassing duplicate check."
exit 0
fi
exit 1
else
echo "✅ Safe to proceed: No existing PRs for issue #$ISSUE_NUM"
exit 0
fi

View File

@@ -4,48 +4,61 @@ Sync branch protection rules from .gitea/branch-protection/*.yml to Gitea.
Correctly uses the Gitea 1.25+ API (not GitHub-style).
"""
from __future__ import annotations
import json
import os
import sys
import json
import urllib.request
from pathlib import Path
import yaml
GITEA_URL = os.getenv("GITEA_URL", "https://forge.alexanderwhitestone.com")
GITEA_TOKEN = os.getenv("GITEA_TOKEN", "")
ORG = "Timmy_Foundation"
CONFIG_DIR = ".gitea/branch-protection"
PROJECT_ROOT = Path(__file__).resolve().parent.parent
CONFIG_DIR = PROJECT_ROOT / ".gitea" / "branch-protection"
def api_request(method: str, path: str, payload: dict | None = None) -> dict:
url = f"{GITEA_URL}/api/v1{path}"
data = json.dumps(payload).encode() if payload else None
req = urllib.request.Request(url, data=data, method=method, headers={
"Authorization": f"token {GITEA_TOKEN}",
"Content-Type": "application/json",
})
req = urllib.request.Request(
url,
data=data,
method=method,
headers={
"Authorization": f"token {GITEA_TOKEN}",
"Content-Type": "application/json",
},
)
with urllib.request.urlopen(req, timeout=30) as resp:
return json.loads(resp.read().decode())
def apply_protection(repo: str, rules: dict) -> bool:
branch = rules.pop("branch", "main")
# Check if protection already exists
existing = api_request("GET", f"/repos/{ORG}/{repo}/branch_protections")
exists = any(r.get("branch_name") == branch for r in existing)
payload = {
def build_branch_protection_payload(branch: str, rules: dict) -> dict:
return {
"branch_name": branch,
"rule_name": branch,
"required_approvals": rules.get("required_approvals", 1),
"block_on_rejected_reviews": rules.get("block_on_rejected_reviews", True),
"dismiss_stale_approvals": rules.get("dismiss_stale_approvals", True),
"block_deletions": rules.get("block_deletions", True),
"block_force_push": rules.get("block_force_push", True),
"block_force_push": rules.get("block_force_push", rules.get("block_force_pushes", True)),
"block_admin_merge_override": rules.get("block_admin_merge_override", True),
"enable_status_check": rules.get("require_ci_to_merge", False),
"status_check_contexts": rules.get("status_check_contexts", []),
"block_on_outdated_branch": rules.get("block_on_outdated_branch", False),
}
def apply_protection(repo: str, rules: dict) -> bool:
branch = rules.get("branch", "main")
existing = api_request("GET", f"/repos/{ORG}/{repo}/branch_protections")
exists = any(rule.get("branch_name") == branch for rule in existing)
payload = build_branch_protection_payload(branch, rules)
try:
if exists:
api_request("PATCH", f"/repos/{ORG}/{repo}/branch_protections/{branch}", payload)
@@ -53,8 +66,8 @@ def apply_protection(repo: str, rules: dict) -> bool:
api_request("POST", f"/repos/{ORG}/{repo}/branch_protections", payload)
print(f"{repo}:{branch} synced")
return True
except Exception as e:
print(f"{repo}:{branch} failed: {e}")
except Exception as exc:
print(f"{repo}:{branch} failed: {exc}")
return False
@@ -62,15 +75,18 @@ def main() -> int:
if not GITEA_TOKEN:
print("ERROR: GITEA_TOKEN not set")
return 1
if not CONFIG_DIR.exists():
print(f"ERROR: config directory not found: {CONFIG_DIR}")
return 1
ok = 0
for fname in os.listdir(CONFIG_DIR):
if not fname.endswith(".yml"):
continue
repo = fname[:-4]
with open(os.path.join(CONFIG_DIR, fname)) as f:
cfg = yaml.safe_load(f)
if apply_protection(repo, cfg.get("rules", {})):
for cfg_path in sorted(CONFIG_DIR.glob("*.yml")):
repo = cfg_path.stem
with cfg_path.open() as fh:
cfg = yaml.safe_load(fh) or {}
rules = cfg.get("rules", {})
rules.setdefault("branch", cfg.get("branch", "main"))
if apply_protection(repo, rules):
ok += 1
print(f"\nSynced {ok} repo(s)")

View File

@@ -0,0 +1,21 @@
const { test } = require('node:test');
const assert = require('node:assert/strict');
const fs = require('node:fs');
const path = require('node:path');
const ROOT = path.resolve(__dirname, '..');
test('index bootstraps portal-health helper before app startup', () => {
const html = fs.readFileSync(path.join(ROOT, 'index.html'), 'utf8');
assert.match(html, /portal-health\.js/);
});
test('app wires recurring portal health checks and offline tooltip behavior', () => {
const source = fs.readFileSync(path.join(ROOT, 'app.js'), 'utf8');
assert.match(source, /startPortalHealthChecks\(/);
assert.match(source, /runPortalHealthChecks\(/);
assert.match(source, /PortalHealth\.DEFAULT_HEALTH_INTERVAL_MS/);
assert.match(source, /tooltipText/);
assert.match(source, /Offline/);
assert.match(source, /setPortalRuntimeState\(/);
});

View File

@@ -0,0 +1,69 @@
const { test } = require('node:test');
const assert = require('node:assert/strict');
const {
DEFAULT_HEALTH_INTERVAL_MS,
derivePortalHealthTarget,
shouldProbePortalHealth,
computePortalRuntimeState,
} = require('../portal-health.js');
test('portal health checks default to five minutes', () => {
assert.equal(DEFAULT_HEALTH_INTERVAL_MS, 5 * 60 * 1000);
});
test('portal health target prefers explicit health URL then destination URL', () => {
assert.equal(
derivePortalHealthTarget({
health_check_url: 'https://status.timmy.foundation/health',
destination: { url: 'https://portal.timmy.foundation' },
}),
'https://status.timmy.foundation/health'
);
assert.equal(
derivePortalHealthTarget({ destination: { url: 'https://portal.timmy.foundation' } }),
'https://portal.timmy.foundation'
);
assert.equal(
derivePortalHealthTarget({ destination: { url: null } }),
null
);
});
test('portal health probe eligibility only tracks portals with external URLs', () => {
assert.equal(shouldProbePortalHealth({ destination: { url: 'https://portal.timmy.foundation' } }), true);
assert.equal(shouldProbePortalHealth({ destination: { url: './local.html' } }), false);
assert.equal(shouldProbePortalHealth({ destination: { url: null } }), false);
});
test('runtime state marks offline portals unavailable and restores base state when healthy', () => {
const config = {
name: 'Workshop',
status: 'online',
blocked_reason: null,
interaction_ready: true,
destination: { url: 'https://workshop.timmy.foundation', action_label: 'Enter Workshop' },
};
const baseState = {
status: 'online',
blocked_reason: null,
interaction_ready: true,
action_label: 'Enter Workshop',
};
const offline = computePortalRuntimeState(config, baseState, { online: false, reason: 'Offline' });
assert.equal(offline.status, 'offline');
assert.equal(offline.interaction_ready, false);
assert.equal(offline.blocked_reason, 'Offline');
assert.equal(offline.tooltipText, 'Offline');
assert.equal(offline.actionLabel, 'OFFLINE');
const restored = computePortalRuntimeState(config, baseState, { online: true });
assert.equal(restored.status, 'online');
assert.equal(restored.interaction_ready, true);
assert.equal(restored.blocked_reason, null);
assert.equal(restored.tooltipText, 'Enter Workshop');
assert.equal(restored.actionLabel, 'Enter Workshop');
});

View File

@@ -20,6 +20,7 @@ from agent.memory import (
SessionTranscript,
create_agent_memory,
)
from nexus.mempalace.conversation_artifacts import ConversationArtifact
from agent.memory_hooks import MemoryHooks
@@ -184,6 +185,24 @@ class TestAgentMemory:
doc_id = mem.write_diary()
assert doc_id is None # MemPalace unavailable
def test_remember_alexander_request_response_uses_sovereign_room(self):
mem = AgentMemory(agent_name="allegro")
mem._available = True
with patch("nexus.mempalace.searcher.add_memory", return_value="doc-123") as add_memory:
doc_id = mem.remember_alexander_request_response(
request_text="Catalog my requests.",
response_text="I will preserve them as artifacts.",
requester="Alexander Whitestone",
source="telegram:timmy-time",
)
assert doc_id == "doc-123"
kwargs = add_memory.call_args.kwargs
assert kwargs["room"] == "sovereign"
assert kwargs["wing"] == mem.wing
assert kwargs["extra_metadata"]["artifact_type"] == "alexander_request_response"
assert kwargs["extra_metadata"]["speaker_tags"] == ["speaker:alexander", "speaker:allegro"]
# ---------------------------------------------------------------------------
# MemoryHooks tests

View File

@@ -0,0 +1,58 @@
from pathlib import Path
import yaml
from nexus.mempalace.config import CORE_ROOMS
from nexus.mempalace.conversation_artifacts import (
ConversationArtifact,
build_request_response_artifact,
extract_alexander_request_pairs,
normalize_speaker,
)
def test_sovereign_room_is_core_room() -> None:
assert "sovereign" in CORE_ROOMS
rooms_yaml = yaml.safe_load(Path("mempalace/rooms.yaml").read_text())
assert any(room["key"] == "sovereign" for room in rooms_yaml["core_rooms"])
def test_normalize_speaker_maps_alexander_variants() -> None:
assert normalize_speaker("Alexander Whitestone") == "alexander"
assert normalize_speaker("Rockachopa") == "alexander"
assert normalize_speaker(" ALEXANDER ") == "alexander"
assert normalize_speaker("Bezalel") == "bezalel"
def test_build_request_response_artifact_creates_sovereign_metadata() -> None:
artifact = build_request_response_artifact(
requester="Alexander Whitestone",
responder="Allegro",
request_text="Please organize my conversation artifacts.",
response_text="I will catalog them under a sovereign room.",
source="telegram:timmy-time",
timestamp="2026-04-16T01:30:00Z",
)
assert isinstance(artifact, ConversationArtifact)
assert artifact.room == "sovereign"
assert artifact.metadata["speaker_tags"] == ["speaker:alexander", "speaker:allegro"]
assert artifact.metadata["artifact_type"] == "alexander_request_response"
assert artifact.metadata["responder"] == "allegro"
assert "## Alexander Request" in artifact.text
assert "## Wizard Response" in artifact.text
def test_extract_alexander_request_pairs_finds_following_wizard_response() -> None:
turns = [
{"speaker": "Alexander Whitestone", "text": "Catalog my requests as artifacts.", "timestamp": "2026-04-16T01:00:00Z"},
{"speaker": "Allegro", "text": "I'll build a sovereign room contract.", "timestamp": "2026-04-16T01:01:00Z"},
{"speaker": "Alexander", "text": "Make sure my words are easy to recall.", "timestamp": "2026-04-16T01:02:00Z"},
{"speaker": "Allegro", "text": "I will tag them with speaker metadata.", "timestamp": "2026-04-16T01:03:00Z"},
]
artifacts = extract_alexander_request_pairs(turns, responder="Allegro", source="telegram")
assert len(artifacts) == 2
assert artifacts[0].metadata["request_timestamp"] == "2026-04-16T01:00:00Z"
assert artifacts[1].metadata["response_timestamp"] == "2026-04-16T01:03:00Z"

View File

@@ -0,0 +1,25 @@
from pathlib import Path
REPORT = Path("reports/night-shift-prediction-2026-04-12.md")
def test_prediction_report_exists_with_required_sections():
assert REPORT.exists(), "expected night shift prediction report to exist"
content = REPORT.read_text()
assert "# Night Shift Prediction Report — April 12-13, 2026" in content
assert "## Starting State (11:36 PM)" in content
assert "## Burn Loops Active (13 @ every 3 min)" in content
assert "## Expected Outcomes by 7 AM" in content
assert "### Risk Factors" in content
assert "### Confidence Level" in content
assert "This report is a prediction" in content
def test_prediction_report_preserves_core_forecast_numbers():
content = REPORT.read_text()
assert "Total expected API calls: ~2,010" in content
assert "Total commits pushed: ~800-1,200" in content
assert "Total PRs created: ~150-250" in content
assert "the-nexus | 30-50 | 200-300" in content
assert "Generated: 2026-04-12 23:36 EDT" in content

View File

@@ -0,0 +1,45 @@
from __future__ import annotations
import importlib.util
import sys
from pathlib import Path
import yaml
PROJECT_ROOT = Path(__file__).parent.parent
_spec = importlib.util.spec_from_file_location(
"sync_branch_protection_test",
PROJECT_ROOT / "scripts" / "sync_branch_protection.py",
)
_mod = importlib.util.module_from_spec(_spec)
sys.modules["sync_branch_protection_test"] = _mod
_spec.loader.exec_module(_mod)
build_branch_protection_payload = _mod.build_branch_protection_payload
def test_build_branch_protection_payload_enables_rebase_before_merge():
payload = build_branch_protection_payload(
"main",
{
"required_approvals": 1,
"dismiss_stale_approvals": True,
"require_ci_to_merge": False,
"block_deletions": True,
"block_force_push": True,
"block_on_outdated_branch": True,
},
)
assert payload["branch_name"] == "main"
assert payload["rule_name"] == "main"
assert payload["block_on_outdated_branch"] is True
assert payload["required_approvals"] == 1
assert payload["enable_status_check"] is False
def test_the_nexus_branch_protection_config_requires_up_to_date_branch():
config = yaml.safe_load((PROJECT_ROOT / ".gitea" / "branch-protection" / "the-nexus.yml").read_text())
rules = config["rules"]
assert rules["block_on_outdated_branch"] is True