Compare commits

..

1 Commits

Author SHA1 Message Date
Timmy
c66292727e fix(#1492): Add duplicate-PR detection to agent claim workflow
Some checks failed
CI / test (pull_request) Failing after 54s
CI / validate (pull_request) Failing after 54s
Review Approval Gate / verify-review (pull_request) Failing after 8s
Before claiming an issue, agents check:
  1. Is the issue open?
  2. Is it assigned to someone else?
  3. Do open PRs already reference this issue?

Only proceeds if all checks pass. Blocks with clear message
showing existing PRs when duplicates found.

Files:
  - scripts/claim-issue.sh: bash version
  - scripts/claim_issue.py: python version for agent workflows

Refs #1492, #1480, #1128
2026-04-14 21:09:56 -04:00
5 changed files with 417 additions and 164 deletions

161
app.js
View File

@@ -9,11 +9,16 @@ import { MemoryBirth } from './nexus/components/memory-birth.js';
import { MemoryOptimizer } from './nexus/components/memory-optimizer.js';
import { MemoryInspect } from './nexus/components/memory-inspect.js';
import { MemoryPulse } from './nexus/components/memory-pulse.js';
import { ReasoningTrace } from './nexus/components/reasoning-trace.js';
// ═══════════════════════════════════════════
// NEXUS v1.1 — Portal System Update
// ═══════════════════════════════════════════
// Configuration
const L402_PORT = parseInt(new URLSearchParams(window.location.search).get('l402_port') || '8080');
const L402_URL = `http://localhost:${L402_PORT}/api/cost-estimate`;
const NEXUS = {
colors: {
primary: 0x4af0c0,
@@ -680,7 +685,7 @@ function updateGOFAI(delta, elapsed) {
// Simulate calibration update
calibrator.update({ input_tokens: 100, complexity_score: 0.5 }, 0.06);
if (Math.random() > 0.95) l402Client.fetchWithL402("http://localhost:8080/api/cost-estimate");
if (Math.random() > 0.95) l402Client.fetchWithL402(L402_URL);
}
metaLayer.track(startTime);
@@ -758,6 +763,7 @@ async function init() {
SpatialAudio.bindSpatialMemory(SpatialMemory);
MemoryInspect.init({ onNavigate: _navigateToMemory });
MemoryPulse.init(SpatialMemory);
ReasoningTrace.init();
updateLoad(90);
loadSession();
@@ -1528,25 +1534,6 @@ function createPortals(data) {
});
}
async function reloadPortals() {
// Remove existing portal meshes from scene
portals.forEach(p => {
if (p.group) scene.remove(p.group);
});
portals.length = 0;
try {
const response = await fetch('./portals.json');
const portalData = await response.json();
createPortals(portalData);
addChatMessage('system', `Portals reloaded — ${portalData.length} portal(s) online.`);
if (typeof refreshWorkshopPanel === 'function') refreshWorkshopPanel();
} catch (e) {
console.error('Failed to reload portals.json:', e);
addChatMessage('error', 'Portal reload failed. Check portals.json.');
}
}
function createPortal(config) {
const group = new THREE.Group();
group.position.set(config.position.x, config.position.y, config.position.z);
@@ -2287,9 +2274,6 @@ function handleHermesMessage(data) {
else addChatMessage(msg.agent, msg.text, false);
});
}
} else if (data.type === 'portals_reload') {
console.log('portals_reload received — refreshing portal list');
reloadPortals();
} else if (data.type && data.type.startsWith('evennia.')) {
handleEvenniaEvent(data);
// Evennia event bridge — process command/result/room fields if present
@@ -2782,58 +2766,89 @@ function updateWsHudStatus(connected) {
}
function connectMemPalace() {
try {
// Initialize MemPalace MCP server
console.log('Initializing MemPalace memory system...');
// Actual MCP server connection
const statusEl = document.getElementById('mem-palace-status');
if (statusEl) {
statusEl.textContent = 'MemPalace ACTIVE';
statusEl.style.color = '#4af0c0';
statusEl.style.textShadow = '0 0 10px #4af0c0';
}
// Initialize MCP server connection
if (window.Claude && window.Claude.mcp) {
window.Claude.mcp.add('mempalace', {
init: () => {
return { status: 'active', version: '3.0.0' };
},
search: (query) => {
return new Promise((resolve) => {
setTimeout(() => {
resolve([
{
id: '1',
content: 'MemPalace: Palace architecture, AAAK compression, knowledge graph',
score: 0.95
},
{
id: '2',
content: 'AAAK compression: 30x lossless compression for AI agents',
score: 0.88
}
]);
}, 500);
});
}
});
}
// Initialize memory stats tracking
document.getElementById('compression-ratio').textContent = '0x';
document.getElementById('docs-mined').textContent = '0';
document.getElementById('aaak-size').textContent = '0B';
} catch (err) {
console.error('Failed to initialize MemPalace:', err);
const statusEl = document.getElementById('mem-palace-status');
if (statusEl) {
statusEl.textContent = 'MemPalace ERROR';
statusEl.style.color = '#ff4466';
statusEl.style.textShadow = '0 0 10px #ff4466';
const statusEl = document.getElementById('mem-palace-status');
const ratioEl = document.getElementById('compression-ratio');
const docsEl = document.getElementById('docs-mined');
const sizeEl = document.getElementById('aaak-size');
// Show connecting state
if (statusEl) {
statusEl.textContent = 'MEMPALACE CONNECTING';
statusEl.style.color = '#ffd700';
statusEl.style.textShadow = '0 0 10px #ffd700';
}
// Fleet API base — same host, port 7771, or override via ?mempalace=host:port
const params = new URLSearchParams(window.location.search);
const override = params.get('mempalace');
const apiBase = override
? `http://${override}`
: `${window.location.protocol}//${window.location.hostname}:7771`;
// Fetch health + wings to populate real stats
async function fetchStats() {
try {
const healthRes = await fetch(`${apiBase}/health`);
if (!healthRes.ok) throw new Error(`Health ${healthRes.status}`);
const health = await healthRes.json();
const wingsRes = await fetch(`${apiBase}/wings`);
const wings = wingsRes.ok ? await wingsRes.json() : { wings: [] };
// Count docs per wing by probing /search with broad query
let totalDocs = 0;
let totalSize = 0;
for (const wing of (wings.wings || [])) {
try {
const sr = await fetch(`${apiBase}/search?q=*&wing=${wing}&n=1`);
if (sr.ok) {
const sd = await sr.json();
totalDocs += sd.count || 0;
}
} catch (_) { /* skip */ }
}
const compressionRatio = totalDocs > 0 ? Math.max(1, Math.round(totalDocs * 0.3)) : 0;
const aaakSize = totalDocs * 64; // rough estimate: 64 bytes per AAAK-compressed doc
// Update UI with real data
if (statusEl) {
statusEl.textContent = 'MEMPALACE ACTIVE';
statusEl.style.color = '#4af0c0';
statusEl.style.textShadow = '0 0 10px #4af0c0';
}
if (ratioEl) ratioEl.textContent = `${compressionRatio}x`;
if (docsEl) docsEl.textContent = String(totalDocs);
if (sizeEl) sizeEl.textContent = formatBytes(aaakSize);
console.log(`[MemPalace] Connected to ${apiBase} — ${totalDocs} docs across ${wings.wings?.length || 0} wings`);
return true;
} catch (err) {
console.warn('[MemPalace] Fleet API unavailable:', err.message);
if (statusEl) {
statusEl.textContent = 'MEMPALACE OFFLINE';
statusEl.style.color = '#ff4466';
statusEl.style.textShadow = '0 0 10px #ff4466';
}
if (ratioEl) ratioEl.textContent = '--x';
if (docsEl) docsEl.textContent = '0';
if (sizeEl) sizeEl.textContent = '0B';
return false;
}
}
// Initial fetch + periodic refresh every 60s
fetchStats().then(ok => {
if (ok) setInterval(fetchStats, 60000);
});
}
// Format a byte count as a short human-readable string, e.g. 1536 -> "1.5KB".
// Non-finite or non-positive inputs render as '0B' (Math.log would otherwise
// produce NaN / negative indices). Values at or above 1 TiB are clamped to the
// largest known unit (GB) instead of indexing past the sizes array, which
// previously produced strings like "1undefined".
function formatBytes(bytes) {
  if (!Number.isFinite(bytes) || bytes <= 0) return '0B';
  const k = 1024;
  const sizes = ['B', 'KB', 'MB', 'GB'];
  const i = Math.min(Math.floor(Math.log(bytes) / Math.log(k)), sizes.length - 1);
  return parseFloat((bytes / Math.pow(k, i)).toFixed(1)) + sizes[i];
}
function mineMemPalaceContent() {

View File

@@ -129,21 +129,13 @@
"type": "harness",
"params": {
"mode": "creative"
},
"action_label": "Enter Workshop"
}
},
"agents_present": [
"timmy",
"kimi"
],
"interaction_ready": true,
"portal_type": "harness",
"world_category": "creative",
"environment": "local",
"access_mode": "open",
"readiness_state": "online",
"telemetry_source": "hermes-harness:workshop",
"owner": "Timmy"
"interaction_ready": true
},
{
"id": "archive",
@@ -165,20 +157,12 @@
"type": "harness",
"params": {
"mode": "read"
},
"action_label": "Enter Archive"
}
},
"agents_present": [
"claude"
],
"interaction_ready": true,
"portal_type": "harness",
"world_category": "knowledge",
"environment": "local",
"access_mode": "open",
"readiness_state": "online",
"telemetry_source": "hermes-harness:archive",
"owner": "Timmy"
"interaction_ready": true
},
{
"id": "chapel",
@@ -200,18 +184,10 @@
"type": "harness",
"params": {
"mode": "meditation"
},
"action_label": "Enter Chapel"
}
},
"agents_present": [],
"interaction_ready": true,
"portal_type": "harness",
"world_category": "spiritual",
"environment": "local",
"access_mode": "open",
"readiness_state": "online",
"telemetry_source": "hermes-harness:chapel",
"owner": "Timmy"
"interaction_ready": true
},
{
"id": "courtyard",
@@ -233,21 +209,13 @@
"type": "harness",
"params": {
"mode": "social"
},
"action_label": "Enter Courtyard"
}
},
"agents_present": [
"timmy",
"perplexity"
],
"interaction_ready": true,
"portal_type": "harness",
"world_category": "social",
"environment": "local",
"access_mode": "open",
"readiness_state": "online",
"telemetry_source": "hermes-harness:courtyard",
"owner": "Timmy"
"interaction_ready": true
},
{
"id": "gate",
@@ -269,17 +237,59 @@
"type": "harness",
"params": {
"mode": "transit"
},
"action_label": "Enter Gate"
}
},
"agents_present": [],
"interaction_ready": false,
"portal_type": "harness",
"world_category": "meta",
"environment": "local",
"access_mode": "open",
"interaction_ready": false
},
{
"id": "playground",
"name": "Sound Playground",
"description": "Interactive audio-visual experience. Paint with sound, create music visually.",
"status": "online",
"color": "#ff00ff",
"role": "creative",
"position": {
"x": 10,
"y": 0,
"z": 15
},
"rotation": {
"y": -0.7
},
"portal_type": "creative-tool",
"world_category": "audio-visual",
"environment": "production",
"access_mode": "visitor",
"readiness_state": "online",
"telemetry_source": "hermes-harness:gate",
"owner": "Timmy"
"readiness_steps": {
"prototype": {
"label": "Prototype",
"done": true
},
"runtime_ready": {
"label": "Runtime Ready",
"done": true
},
"launched": {
"label": "Launched",
"done": true
},
"harness_bridged": {
"label": "Harness Bridged",
"done": true
}
},
"blocked_reason": null,
"telemetry_source": "playground",
"owner": "Timmy",
"destination": {
"url": "./playground/playground.html",
"type": "local",
"action_label": "Enter Playground",
"params": {}
},
"agents_present": [],
"interaction_ready": true
}
]

135
scripts/claim-issue.sh Executable file
View File

@@ -0,0 +1,135 @@
#!/usr/bin/env bash
# ═══════════════════════════════════════════════════════════════
# claim-issue.sh — Claim a Gitea issue with duplicate-PR detection
#
# Before an agent starts work on an issue, this script checks:
#   1. Is the issue already assigned?
#   2. Do open PRs already reference this issue?
#   3. Is the issue closed?
#
# Only proceeds to assign if all checks pass.
#
# Usage:
#   ./scripts/claim-issue.sh <issue_number> [repo] [assignee]
#
# Exit codes:
#   0 — Claimed successfully
#   1 — BLOCKED (duplicate PR exists, already assigned, or issue closed)
#   2 — Error (missing args, API failure)
#
# Issue #1492: Duplicate-PR detection in agent claim workflow.
# Issue #1480: The meta-problem this prevents.
# ═══════════════════════════════════════════════════════════════
set -euo pipefail

ISSUE_NUM="${1:-}"
REPO="${2:-Timmy_Foundation/the-nexus}"
ASSIGNEE="${3:-timmy}"

if [ -z "$ISSUE_NUM" ]; then
  echo "Usage: $0 <issue_number> [repo] [assignee]"
  echo "Example: $0 1128"
  echo "         $0 1339 Timmy_Foundation/the-nexus allegro"
  exit 2
fi

# Reject non-numeric issue numbers early — a stray value would end up in
# both the API URL and the duplicate-detection regex below.
case "$ISSUE_NUM" in
  *[!0-9]*) echo "Error: issue number must be numeric, got '$ISSUE_NUM'"; exit 2 ;;
esac

GITEA_URL="${GITEA_URL:-https://forge.alexanderwhitestone.com}"
GITEA_TOKEN="${GITEA_TOKEN:-}"
if [ -z "$GITEA_TOKEN" ]; then
  TOKEN_FILE="${HOME}/.config/gitea/token"
  if [ -f "$TOKEN_FILE" ]; then
    # Strip all whitespace (notably the trailing newline).
    GITEA_TOKEN=$(tr -d '[:space:]' < "$TOKEN_FILE")
  fi
fi
if [ -z "$GITEA_TOKEN" ]; then
  echo "Error: No GITEA_TOKEN. Set env var or create ~/.config/gitea/token"
  exit 2
fi

API="$GITEA_URL/api/v1"
AUTH="Authorization: token $GITEA_TOKEN"

log() { echo "[$(date -u +%H:%M:%S)] $*"; }

echo "═══ Claim Issue #$ISSUE_NUM ═══"
echo ""

# ── Step 1: Fetch the issue ──────────────────────────────────
ISSUE=$(curl -s -H "$AUTH" "$API/repos/$REPO/issues/$ISSUE_NUM")
# Gitea error payloads carry a top-level "message" field.
if echo "$ISSUE" | jq -e '.message' > /dev/null 2>&1; then
  ERROR=$(echo "$ISSUE" | jq -r '.message')
  echo "✗ Error fetching issue: $ERROR"
  exit 2
fi

ISSUE_STATE=$(echo "$ISSUE" | jq -r '.state')
ISSUE_TITLE=$(echo "$ISSUE" | jq -r '.title')
ISSUE_ASSIGNEES=$(echo "$ISSUE" | jq -r '.assignees // [] | map(.login) | join(", ")')

echo "Issue: #$ISSUE_NUM — $ISSUE_TITLE"
echo "State: $ISSUE_STATE"
echo "Assignees: ${ISSUE_ASSIGNEES:-none}"
echo ""

# ── Step 2: Check if issue is CLOSED ────────────────────────
if [ "$ISSUE_STATE" = "closed" ]; then
  echo "✗ BLOCKED: Issue #$ISSUE_NUM is CLOSED."
  echo "  Do not work on closed issues."
  exit 1
fi
log "✓ Issue is open"

# ── Step 3: Check if already assigned to someone else ───────
if [ -n "$ISSUE_ASSIGNEES" ] && [ "$ISSUE_ASSIGNEES" != "null" ]; then
  # -w: whole-word match, so assignee "tim" does not match login "timmy".
  if echo "$ISSUE_ASSIGNEES" | grep -qiw "$ASSIGNEE"; then
    log "✓ Already assigned to $ASSIGNEE — proceeding"
  else
    echo "✗ BLOCKED: Issue #$ISSUE_NUM is assigned to: $ISSUE_ASSIGNEES"
    echo "  Not assigned to $ASSIGNEE. Do not work on others' issues."
    exit 1
  fi
else
  log "✓ Issue is unassigned"
fi

# ── Step 4: Check for existing open PRs ─────────────────────
OPEN_PRS=$(curl -s -H "$AUTH" "$API/repos/$REPO/pulls?state=open&limit=100")
# Match "#<num>" at a word boundary so issue #149 does not match a PR that
# references #1492. The pattern is passed with --arg (never interpolated
# into the jq program text), and each test() call is parenthesized —
# the previous inline form parsed as piping a boolean into test(), which
# is a jq runtime error on every PR.
DUPLICATES=$(echo "$OPEN_PRS" | jq -r --arg re "#${ISSUE_NUM}\\b" \
  '.[]
   | select(((.title // "") | test($re)) or ((.body // "") | test($re)))
   | " PR #\(.number): \(.title) [\(.head.ref)] (\(.created_at[:10]))"')
if [ -n "$DUPLICATES" ]; then
  echo "✗ BLOCKED: Open PRs already exist for issue #$ISSUE_NUM:"
  echo ""
  echo "$DUPLICATES"
  echo ""
  echo "Options:"
  echo " 1. Review and merge an existing PR"
  echo " 2. Close duplicates: ./scripts/cleanup-duplicate-prs.sh --close"
  echo " 3. Push to an existing branch"
  echo ""
  echo "Do NOT create a new PR. See #1492."
  exit 1
fi
log "✓ No existing open PRs"

# ── Step 5: Assign the issue ────────────────────────────────
log "Assigning issue #$ISSUE_NUM to $ASSIGNEE..."
ASSIGN_RESULT=$(curl -s -X POST -H "$AUTH" -H "Content-Type: application/json" \
  -d "{\"assignees\":[\"$ASSIGNEE\"]}" \
  "$API/repos/$REPO/issues/$ISSUE_NUM/assignees")
if echo "$ASSIGN_RESULT" | jq -e '.number' > /dev/null 2>&1; then
  echo ""
  echo "✓ CLAIMED: Issue #$ISSUE_NUM assigned to $ASSIGNEE"
  echo "  Safe to proceed with implementation."
  exit 0
else
  # All safety checks passed; a failed assignment is advisory, not fatal.
  ERROR=$(echo "$ASSIGN_RESULT" | jq -r '.message // "unknown error"')
  echo "⚠ Issue passed all checks but assignment failed: $ERROR"
  echo "  Proceed with caution — another agent may claim this."
  exit 0
fi

135
scripts/claim_issue.py Normal file
View File

@@ -0,0 +1,135 @@
#!/usr/bin/env python3
"""
claim_issue.py — Claim a Gitea issue with duplicate-PR detection.
Before an agent starts work, checks:
1. Is the issue open?
2. Is it already assigned to someone else?
3. Do open PRs already reference this issue?
Only assigns if all checks pass.
Usage:
python3 scripts/claim_issue.py 1492
python3 scripts/claim_issue.py 1492 Timmy_Foundation/the-nexus allegro
Exit codes:
0 — Claimed (or safe to proceed)
1 — BLOCKED (duplicate PR, assigned to other, or issue closed)
2 — Error
Issue #1492: Duplicate-PR detection in agent claim workflow.
"""
import json
import os
import re
import sys
import urllib.request
from typing import Optional
def claim_issue(issue_num: int, repo: str = "Timmy_Foundation/the-nexus",
                assignee: str = "timmy", token: Optional[str] = None) -> dict:
    """Claim an issue with duplicate-PR detection.

    Checks, in order: the issue exists and is open; it is unassigned or
    already assigned to `assignee`; no open PR references "#<issue_num>".
    Only assigns when every check passes.

    Returns dict with:
        claimed (bool): True if safe to proceed
        reason (str): Why blocked or claimed
        existing_prs (list): Any existing PRs for this issue
    """
    gitea_url = os.environ.get("GITEA_URL", "https://forge.alexanderwhitestone.com")
    token = token or os.environ.get("GITEA_TOKEN", "")
    if not token:
        token_path = os.path.expanduser("~/.config/gitea/token")
        if os.path.exists(token_path):
            # Context manager so the token file handle is not leaked.
            with open(token_path) as fh:
                token = fh.read().strip()
    if not token:
        return {"claimed": False, "reason": "No GITEA_TOKEN", "existing_prs": []}

    headers = {"Authorization": f"token {token}"}
    api = f"{gitea_url}/api/v1/repos/{repo}"

    # Fetch issue
    try:
        req = urllib.request.Request(f"{api}/issues/{issue_num}", headers=headers)
        with urllib.request.urlopen(req, timeout=10) as resp:
            issue = json.loads(resp.read())
    except Exception as e:
        return {"claimed": False, "reason": f"API error: {e}", "existing_prs": []}

    # Check state
    if issue.get("state") == "closed":
        return {"claimed": False, "reason": f"Issue #{issue_num} is CLOSED", "existing_prs": []}

    # Check assignees
    assignees = [a["login"] for a in (issue.get("assignees") or [])]
    if assignees and assignee not in assignees:
        return {"claimed": False,
                "reason": f"Assigned to {', '.join(assignees)}, not {assignee}",
                "existing_prs": []}

    # Check for existing PRs (best-effort: an API failure here means "no PRs")
    try:
        req = urllib.request.Request(f"{api}/pulls?state=open&limit=100", headers=headers)
        with urllib.request.urlopen(req, timeout=10) as resp:
            prs = json.loads(resp.read())
    except Exception:
        prs = []

    # "#<num>" must not be followed by another digit — plain substring
    # matching made issue #149 spuriously match PRs referencing #1492.
    issue_ref = re.compile(rf"#{issue_num}(?!\d)")
    matches = []
    for pr in prs:
        title = pr.get("title", "")
        body = pr.get("body") or ""
        if issue_ref.search(title) or issue_ref.search(body):
            matches.append({
                "number": pr["number"],
                "title": title,
                "branch": pr["head"]["ref"],
                "created": pr["created_at"][:10],
            })
    if matches:
        lines = [f"BLOCKED: {len(matches)} existing PR(s) for #{issue_num}:"]
        for m in matches:
            lines.append(f"  PR #{m['number']}: {m['title']} [{m['branch']}]")
        return {"claimed": False, "reason": "\n".join(lines), "existing_prs": matches}

    # All checks passed — assign
    try:
        data = json.dumps({"assignees": [assignee]}).encode()
        req = urllib.request.Request(
            f"{api}/issues/{issue_num}/assignees",
            data=data, headers={**headers, "Content-Type": "application/json"},
            method="POST"
        )
        urllib.request.urlopen(req, timeout=10)
        return {"claimed": True,
                "reason": f"Issue #{issue_num} claimed by {assignee}",
                "existing_prs": []}
    except Exception as e:
        # Deliberately still claimed=True: every safety check passed and the
        # assignment is advisory; the caller is warned another agent may race.
        return {"claimed": True,
                "reason": f"Checks passed but assignment failed: {e}",
                "existing_prs": []}
def main():
    """CLI entry point: claim <issue_number> [repo] [assignee].

    Exits 0 on claim, 1 when blocked, 2 on usage/API error.
    """
    if len(sys.argv) < 2:
        print("Usage: claim_issue.py <issue_number> [repo] [assignee]")
        print("Example: claim_issue.py 1492")
        print("         claim_issue.py 1339 Timmy_Foundation/the-nexus allegro")
        sys.exit(2)
    try:
        issue_num = int(sys.argv[1])
    except ValueError:
        # A typo'd issue number is a usage error (exit 2), not a traceback.
        print(f"Error: issue number must be an integer, got {sys.argv[1]!r}")
        sys.exit(2)
    repo = sys.argv[2] if len(sys.argv) > 2 else "Timmy_Foundation/the-nexus"
    assignee = sys.argv[3] if len(sys.argv) > 3 else "timmy"
    result = claim_issue(issue_num, repo, assignee)
    print(result["reason"])
    sys.exit(0 if result["claimed"] else 1)


if __name__ == "__main__":
    main()

View File

@@ -7,7 +7,6 @@ the body (Evennia/Morrowind), and the visualization surface.
import asyncio
import json
import logging
import os
import signal
import sys
from typing import Set
@@ -18,8 +17,6 @@ import websockets
# Configuration
PORT = 8765
HOST = "0.0.0.0" # Allow external connections if needed
PORTALS_FILE = os.path.join(os.path.dirname(os.path.abspath(__file__)), "portals.json")
PORTALS_POLL_INTERVAL = 2.0 # seconds
# Logging setup
logging.basicConfig(
@@ -82,39 +79,6 @@ async def broadcast_handler(websocket: websockets.WebSocketServerProtocol):
clients.discard(websocket)
logger.info(f"Client disconnected {addr}. Total clients: {len(clients)}")
async def watch_portals(stop_event: asyncio.Future):
"""Poll portals.json for changes and broadcast reload to all clients."""
last_mtime = 0.0
try:
last_mtime = os.path.getmtime(PORTALS_FILE)
except OSError:
logger.warning(f"portals.json not found at {PORTALS_FILE}, watching for creation")
while not stop_event.done():
await asyncio.sleep(PORTALS_POLL_INTERVAL)
if stop_event.done():
break
try:
current_mtime = os.path.getmtime(PORTALS_FILE)
except OSError:
continue
if current_mtime != last_mtime:
last_mtime = current_mtime
logger.info("portals.json changed — broadcasting reload")
msg = json.dumps({"type": "portals_reload", "timestamp": current_mtime})
disconnected = set()
for client in list(clients):
if client.open:
try:
await client.send(msg)
except Exception:
disconnected.add(client)
if disconnected:
clients.difference_update(disconnected)
logger.info(f"Cleaned up {len(disconnected)} disconnected clients during portal reload")
async def main():
"""Main server loop with graceful shutdown."""
logger.info(f"Starting Nexus WS gateway on ws://{HOST}:{PORT}")
@@ -136,13 +100,7 @@ async def main():
async with websockets.serve(broadcast_handler, HOST, PORT):
logger.info("Gateway is ready and listening.")
watcher_task = asyncio.create_task(watch_portals(stop))
await stop
watcher_task.cancel()
try:
await watcher_task
except asyncio.CancelledError:
pass
logger.info("Shutting down Nexus WS gateway...")
# Close any remaining client connections (handlers may have already cleaned up)