Compare commits
1 Commits
fix/623
...
data/scene
| Author | SHA1 | Date | |
|---|---|---|---|
| 66ce20bbbc |
@@ -31,14 +31,6 @@ class GlitchCategory(Enum):
|
||||
WATER_REFLECTION = "water_reflection"
|
||||
SKYBOX_SEAM = "skybox_seam"
|
||||
|
||||
# Three.js-specific categories (ref: timmy-config#543)
|
||||
SHADER_FAILURE = "shader_failure"
|
||||
TEXTURE_PLACEHOLDER = "texture_placeholder"
|
||||
UV_MAPPING_ERROR = "uv_mapping_error"
|
||||
FRUSTUM_CULLING = "frustum_culling"
|
||||
SHADOW_MAP_ARTIFACT = "shadow_map_artifact"
|
||||
BLOOM_OVERFLOW = "bloom_overflow"
|
||||
|
||||
|
||||
@dataclass
|
||||
class GlitchPattern:
|
||||
@@ -249,123 +241,6 @@ MATRIX_GLITCH_PATTERNS: list[GlitchPattern] = [
|
||||
],
|
||||
confidence_threshold=0.45,
|
||||
),
|
||||
|
||||
# --- Three.js-Specific Glitch Patterns (ref: timmy-config#543) ---
|
||||
GlitchPattern(
|
||||
category=GlitchCategory.SHADER_FAILURE,
|
||||
name="Shader Compilation Failure",
|
||||
description="Three.js shader failed to compile, rendering the material as solid black. "
|
||||
"Common when custom ShaderMaterial has syntax errors or missing uniforms.",
|
||||
severity=GlitchSeverity.CRITICAL,
|
||||
detection_prompts=[
|
||||
"Look for objects or surfaces rendered as pure black (#000000) that should have visible textures or materials.",
|
||||
"Identify geometry that appears completely dark while surrounding objects are normally lit.",
|
||||
"Check for objects where the material seems to 'absorb all light' — flat black with no shading gradient.",
|
||||
],
|
||||
visual_indicators=[
|
||||
"solid black object with no shading",
|
||||
"geometry rendered as silhouette",
|
||||
"material appears to absorb light entirely",
|
||||
"black patch inconsistent with scene lighting",
|
||||
],
|
||||
confidence_threshold=0.7,
|
||||
),
|
||||
GlitchPattern(
|
||||
category=GlitchCategory.TEXTURE_PLACEHOLDER,
|
||||
name="Three.js Texture Not Loaded",
|
||||
description="Three.js failed to load the texture asset, rendering a 1x1 white pixel "
|
||||
"stretched across the entire surface. Distinguished from missing-texture by "
|
||||
"the uniform white/grey appearance rather than magenta.",
|
||||
severity=GlitchSeverity.CRITICAL,
|
||||
detection_prompts=[
|
||||
"Look for surfaces that are uniformly white or light grey with no texture detail, even on large geometry.",
|
||||
"Identify objects where the texture appears as a single solid color stretched across complex UVs.",
|
||||
"Check for surfaces that look 'blank' or 'unloaded' — flat white/grey where detail should exist.",
|
||||
],
|
||||
visual_indicators=[
|
||||
"uniform white or light grey surface",
|
||||
"no texture detail on large geometry",
|
||||
"stretched single-color appearance",
|
||||
"1x1 pixel placeholder stretched to fill UV space",
|
||||
],
|
||||
confidence_threshold=0.65,
|
||||
),
|
||||
GlitchPattern(
|
||||
category=GlitchCategory.UV_MAPPING_ERROR,
|
||||
name="BufferGeometry UV Mapping Error",
|
||||
description="Three.js BufferGeometry has incorrect UV coordinates, causing textures to "
|
||||
"appear stretched, compressed, or mapped to the wrong faces.",
|
||||
severity=GlitchSeverity.HIGH,
|
||||
detection_prompts=[
|
||||
"Look for textures that appear dramatically stretched in one direction on specific faces.",
|
||||
"Identify surfaces where the texture pattern is distorted but other nearby surfaces look correct.",
|
||||
"Check for faces where the texture seems 'smeared' or mapped with incorrect aspect ratio.",
|
||||
],
|
||||
visual_indicators=[
|
||||
"texture stretching on specific faces",
|
||||
"distorted pattern on geometry",
|
||||
"smeared texture appearance",
|
||||
"aspect ratio mismatch between texture and surface",
|
||||
],
|
||||
confidence_threshold=0.6,
|
||||
),
|
||||
GlitchPattern(
|
||||
category=GlitchCategory.FRUSTUM_CULLING,
|
||||
name="Frustum Culling Artifact",
|
||||
description="Three.js frustum culling incorrectly marks objects as outside the camera "
|
||||
"frustum, causing them to pop in/out of existence at screen edges.",
|
||||
severity=GlitchSeverity.MEDIUM,
|
||||
detection_prompts=[
|
||||
"Look for objects that are partially visible at the edge of the frame — half-rendered or cut off unnaturally.",
|
||||
"Identify geometry that seems to 'pop' into existence as the view angle changes.",
|
||||
"Check screen edges for objects that appear suddenly rather than smoothly entering the viewport.",
|
||||
],
|
||||
visual_indicators=[
|
||||
"half-visible object at screen edge",
|
||||
"object popping into frame",
|
||||
"abrupt appearance of geometry",
|
||||
"bounding box visible but mesh missing",
|
||||
],
|
||||
confidence_threshold=0.55,
|
||||
),
|
||||
GlitchPattern(
|
||||
category=GlitchCategory.SHADOW_MAP_ARTIFACT,
|
||||
name="Shadow Map Resolution Artifact",
|
||||
description="Three.js shadow map has insufficient resolution, causing pixelated, "
|
||||
"blocky shadows with visible texel edges instead of smooth shadow gradients.",
|
||||
severity=GlitchSeverity.MEDIUM,
|
||||
detection_prompts=[
|
||||
"Look for shadows with visible blocky or pixelated edges instead of smooth gradients.",
|
||||
"Identify shadow maps where individual texels (texture pixels) are clearly visible.",
|
||||
"Check for shadows that appear as jagged stair-stepped patterns rather than soft edges.",
|
||||
],
|
||||
visual_indicators=[
|
||||
"blocky shadow edges",
|
||||
"visible texel grid in shadows",
|
||||
"stair-stepped shadow boundary",
|
||||
"pixelated shadow gradient",
|
||||
],
|
||||
confidence_threshold=0.55,
|
||||
),
|
||||
GlitchPattern(
|
||||
category=GlitchCategory.BLOOM_OVERFLOW,
|
||||
name="Post-Processing Bloom Overflow",
|
||||
description="Three.js UnrealBloomPass or similar post-processing bloom effect is too "
|
||||
"intense, causing bright areas to bleed glow into surrounding geometry.",
|
||||
severity=GlitchSeverity.LOW,
|
||||
detection_prompts=[
|
||||
"Look for bright areas that have an unusually large, soft glow bleeding into adjacent surfaces.",
|
||||
"Identify scenes where light sources appear to have a 'halo' that extends beyond physical plausibility.",
|
||||
"Check for bright objects whose glow color bleeds onto nearby unrelated geometry.",
|
||||
],
|
||||
visual_indicators=[
|
||||
"excessive glow bleeding from bright surfaces",
|
||||
"halo around light sources",
|
||||
"bloom color tinting adjacent geometry",
|
||||
"glow bleeding beyond object boundaries",
|
||||
],
|
||||
confidence_threshold=0.5,
|
||||
),
|
||||
]
|
||||
|
||||
|
||||
@@ -414,23 +289,6 @@ def build_vision_prompt(patterns: list[GlitchPattern] | None = None) -> str:
|
||||
)
|
||||
|
||||
|
||||
|
||||
# Three.js-specific category set for filtering (ref: timmy-config#543)
|
||||
THREEJS_CATEGORIES = {
|
||||
GlitchCategory.SHADER_FAILURE,
|
||||
GlitchCategory.TEXTURE_PLACEHOLDER,
|
||||
GlitchCategory.UV_MAPPING_ERROR,
|
||||
GlitchCategory.FRUSTUM_CULLING,
|
||||
GlitchCategory.SHADOW_MAP_ARTIFACT,
|
||||
GlitchCategory.BLOOM_OVERFLOW,
|
||||
}
|
||||
|
||||
|
||||
def get_threejs_patterns() -> list[GlitchPattern]:
|
||||
"""Return only Three.js-specific glitch patterns."""
|
||||
return [p for p in MATRIX_GLITCH_PATTERNS if p.category in THREEJS_CATEGORIES]
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
import json
|
||||
print(f"Loaded {len(MATRIX_GLITCH_PATTERNS)} glitch patterns:\n")
|
||||
|
||||
@@ -9,7 +9,7 @@ Usage:
|
||||
python matrix_glitch_detector.py <url> [--angles 4] [--output report.json]
|
||||
python matrix_glitch_detector.py --demo # Run with synthetic test data
|
||||
|
||||
Ref: timmy-config#491, timmy-config#543
|
||||
Ref: timmy-config#491
|
||||
"""
|
||||
|
||||
import argparse
|
||||
@@ -33,7 +33,6 @@ from glitch_patterns import (
|
||||
MATRIX_GLITCH_PATTERNS,
|
||||
build_vision_prompt,
|
||||
get_patterns_by_severity,
|
||||
get_threejs_patterns,
|
||||
)
|
||||
|
||||
|
||||
@@ -346,17 +345,14 @@ def _parse_vision_response(
|
||||
|
||||
def _infer_severity(category: str, confidence: float) -> str:
|
||||
"""Infer severity from category and confidence when not provided."""
|
||||
critical_cats = {"missing_textures", "clipping", "shader_failure", "texture_placeholder"}
|
||||
high_cats = {"floating_assets", "broken_normals", "uv_mapping_error"}
|
||||
medium_cats = {"frustum_culling", "shadow_map_artifact"}
|
||||
critical_cats = {"missing_textures", "clipping"}
|
||||
high_cats = {"floating_assets", "broken_normals"}
|
||||
|
||||
cat_lower = category.lower()
|
||||
if any(c in cat_lower for c in critical_cats):
|
||||
return "critical" if confidence > 0.7 else "high"
|
||||
if any(c in cat_lower for c in high_cats):
|
||||
return "high" if confidence > 0.7 else "medium"
|
||||
if any(c in cat_lower for c in medium_cats):
|
||||
return "medium" if confidence > 0.6 else "low"
|
||||
return "medium" if confidence > 0.6 else "low"
|
||||
|
||||
|
||||
@@ -393,9 +389,9 @@ def build_report(
|
||||
),
|
||||
},
|
||||
metadata={
|
||||
"detector_version": "0.2.0",
|
||||
"detector_version": "0.1.0",
|
||||
"pattern_count": len(MATRIX_GLITCH_PATTERNS),
|
||||
"reference": "timmy-config#491, timmy-config#543",
|
||||
"reference": "timmy-config#491",
|
||||
},
|
||||
)
|
||||
|
||||
@@ -464,30 +460,6 @@ def run_demo(output_path: Optional[Path] = None) -> ScanResult:
|
||||
screenshot_index=3,
|
||||
screenshot_angle="left",
|
||||
),
|
||||
DetectedGlitch(
|
||||
id=str(uuid.uuid4())[:8],
|
||||
category="shader_failure",
|
||||
name="Black Material on Portal Frame",
|
||||
description="Portal frame rendered as solid black — shader compilation failed (missing uniform u_time)",
|
||||
severity="critical",
|
||||
confidence=0.91,
|
||||
location_x=45.0,
|
||||
location_y=30.0,
|
||||
screenshot_index=0,
|
||||
screenshot_angle="front",
|
||||
),
|
||||
DetectedGlitch(
|
||||
id=str(uuid.uuid4())[:8],
|
||||
category="shadow_map_artifact",
|
||||
name="Pixelated Character Shadow",
|
||||
description="Character shadow shows visible texel grid — shadow map resolution too low (512x512)",
|
||||
severity="medium",
|
||||
confidence=0.78,
|
||||
location_x=52.0,
|
||||
location_y=75.0,
|
||||
screenshot_index=1,
|
||||
screenshot_angle="right",
|
||||
),
|
||||
]
|
||||
|
||||
print(f"[*] Detected {len(demo_glitches)} glitches")
|
||||
@@ -524,11 +496,6 @@ Examples:
|
||||
help="Minimum severity to include in report",
|
||||
)
|
||||
parser.add_argument("--verbose", "-v", action="store_true", help="Verbose output")
|
||||
parser.add_argument(
|
||||
"--threejs",
|
||||
action="store_true",
|
||||
help="Focus on Three.js-specific glitch patterns only (shader, texture, UV, culling, shadow, bloom)",
|
||||
)
|
||||
|
||||
args = parser.parse_args()
|
||||
|
||||
@@ -558,13 +525,9 @@ Examples:
|
||||
screenshots = capture_screenshots(args.url, angles, screenshots_dir)
|
||||
print(f"[*] Captured {len(screenshots)} screenshots")
|
||||
|
||||
# Filter patterns by severity and type
|
||||
# Filter patterns by severity
|
||||
min_sev = GlitchSeverity(args.min_severity)
|
||||
patterns = get_patterns_by_severity(min_sev)
|
||||
if args.threejs:
|
||||
threejs_patterns = get_threejs_patterns()
|
||||
patterns = [p for p in patterns if p in threejs_patterns]
|
||||
print(f"[*] Three.js-focused mode: {len(patterns)} patterns")
|
||||
|
||||
# Analyze with vision AI
|
||||
print(f"[*] Analyzing with vision AI ({len(patterns)} patterns)...")
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Full Nostr agent-to-agent communication demo - FINAL WORKING
|
||||
"""
|
||||
|
||||
@@ -1,271 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Pre-Flight Provider Check Script
|
||||
Issue #508: [Robustness] Credential drain detection — provider health checks
|
||||
|
||||
Pre-flight check before session launch: verifies provider credentials and balance.
|
||||
|
||||
Usage:
|
||||
python3 preflight-provider-check.py # Check all providers
|
||||
python3 preflight-provider-check.py --launch # Check and return exit code
|
||||
python3 preflight-provider-check.py --balance # Check OpenRouter balance
|
||||
"""
|
||||
|
||||
import os, sys, json, yaml, urllib.request
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
# Configuration.
# HERMES_HOME is overridable via the environment; defaults to ~/.hermes.
HERMES_HOME = Path(os.environ.get("HERMES_HOME", Path.home() / ".hermes"))
# NOTE(review): fleet-health logs live under the user's home, not under
# HERMES_HOME — confirm this split is intentional.
LOG_DIR = Path.home() / ".local" / "timmy" / "fleet-health"
LOG_FILE = LOG_DIR / "preflight-check.log"
|
||||
|
||||
def log(msg):
    """Append *msg* to the preflight log file and echo it to stdout.

    The entry is prefixed with a UTC timestamp. Console echo is suppressed
    when ``--quiet`` appears anywhere on the command line.
    """
    timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
    log_entry = "[" + timestamp + "] " + msg

    LOG_DIR.mkdir(parents=True, exist_ok=True)
    # Explicit encoding: the platform default is locale-dependent and can
    # fail on non-ASCII provider error messages.
    with open(LOG_FILE, "a", encoding="utf-8") as f:
        f.write(log_entry + "\n")

    if "--quiet" not in sys.argv:
        print(log_entry)
|
||||
|
||||
def get_provider_api_key(provider):
    """Get API key for a provider from .env or environment."""
    key_name = provider.upper() + "_API_KEY"
    env_file = HERMES_HOME / ".env"

    if env_file.exists():
        with open(env_file) as fh:
            for raw in fh:
                entry = raw.strip()
                if not entry.startswith(key_name + "="):
                    continue
                # First match wins, like a minimal dotenv parser.
                return entry.split("=", 1)[1].strip().strip("'\"")

    # Fall back to the process environment.
    return os.environ.get(key_name)
|
||||
|
||||
def check_openrouter_balance(api_key):
    """Check OpenRouter balance via /api/v1/auth/key.

    Returns a ``(healthy, message, remaining)`` tuple. ``remaining`` is the
    credit balance, ``0`` on failure, or ``None`` when the account reports
    no spending limit.
    """
    if not api_key:
        return False, "No API key", 0

    try:
        req = urllib.request.Request(
            "https://openrouter.ai/api/v1/auth/key",
            headers={"Authorization": "Bearer " + api_key}
        )
        # Context manager closes the HTTP response; the original leaked it.
        with urllib.request.urlopen(req, timeout=10) as resp:
            data = json.loads(resp.read())

        # NOTE(review): a limit of exactly 0 is treated like "no limit"
        # because of the falsy check below — confirm that is intended.
        credits = data.get("data", {}).get("limit", 0)
        usage = data.get("data", {}).get("usage", 0)
        remaining = credits - usage if credits else None

        if remaining is not None and remaining <= 0:
            return False, "No credits remaining", 0
        elif remaining is not None:
            return True, "Credits available", remaining
        else:
            return True, "Unlimited or unknown balance", None

    except urllib.error.HTTPError as e:
        # 401 means the key itself is bad; other codes are server-side.
        if e.code == 401:
            return False, "Invalid API key", 0
        else:
            return False, "HTTP " + str(e.code), 0
    except Exception as e:
        return False, str(e)[:100], 0
|
||||
|
||||
def check_nous_key(api_key):
    """Check Nous API key with a minimal /v1/models call.

    Returns ``(healthy, message)``.
    """
    if not api_key:
        return False, "No API key"

    try:
        req = urllib.request.Request(
            "https://inference.nousresearch.com/v1/models",
            headers={"Authorization": "Bearer " + api_key}
        )
        # Close the response deterministically (the original never did).
        with urllib.request.urlopen(req, timeout=10) as resp:
            status = resp.status

        if status == 200:
            return True, "Valid key"
        else:
            return False, "HTTP " + str(status)

    except urllib.error.HTTPError as e:
        if e.code == 401:
            return False, "Invalid API key"
        elif e.code == 403:
            return False, "Forbidden"
        else:
            return False, "HTTP " + str(e.code)
    except Exception as e:
        return False, str(e)[:100]
|
||||
|
||||
def check_anthropic_key(api_key):
    """Check Anthropic API key with a minimal /v1/models call.

    Returns ``(healthy, message)``.
    """
    if not api_key:
        return False, "No API key"

    try:
        req = urllib.request.Request(
            "https://api.anthropic.com/v1/models",
            headers={
                "x-api-key": api_key,
                # Anthropic requires an explicit API version header.
                "anthropic-version": "2023-06-01"
            }
        )
        # Close the response deterministically (the original never did).
        with urllib.request.urlopen(req, timeout=10) as resp:
            status = resp.status

        if status == 200:
            return True, "Valid key"
        else:
            return False, "HTTP " + str(status)

    except urllib.error.HTTPError as e:
        if e.code == 401:
            return False, "Invalid API key"
        elif e.code == 403:
            return False, "Forbidden"
        else:
            return False, "HTTP " + str(e.code)
    except Exception as e:
        return False, str(e)[:100]
|
||||
|
||||
def check_ollama():
    """Check if a local Ollama server is running on port 11434.

    Returns ``(healthy, message)``; on success the message reports how
    many models the server has loaded.
    """
    try:
        req = urllib.request.Request("http://localhost:11434/api/tags")
        # Context manager closes the response; the original leaked it.
        with urllib.request.urlopen(req, timeout=5) as resp:
            if resp.status == 200:
                data = json.loads(resp.read())
                models = data.get("models", [])
                return True, str(len(models)) + " models loaded"
            return False, "HTTP " + str(resp.status)

    except Exception as e:
        # Connection refused etc. simply means "not running".
        return False, str(e)[:100]
|
||||
|
||||
def get_configured_provider():
    """Return the provider name from the global config, or None.

    Reads ``model.provider`` from ``$HERMES_HOME/config.yaml``. Any parse
    or I/O problem is treated as "not configured".
    """
    config_file = HERMES_HOME / "config.yaml"
    if not config_file.exists():
        return None

    try:
        with open(config_file) as f:
            # `or {}` guards against an empty file (safe_load returns None).
            config = yaml.safe_load(f) or {}

        model_config = config.get("model", {})
        if isinstance(model_config, dict):
            return model_config.get("provider")
    except Exception:
        # The original bare `except:` also swallowed SystemExit and
        # KeyboardInterrupt; Exception is the correct scope here.
        pass

    return None
|
||||
|
||||
def run_preflight_check():
    """Run pre-flight check on all providers.

    Probes OpenRouter (including balance), Nous, Anthropic, and local
    Ollama, logs a per-provider summary, and warns when the provider
    selected in the global config is unhealthy.

    Returns:
        (results, configured): ``results`` maps provider name to a dict
        with ``healthy``/``message`` keys (plus ``balance`` for
        OpenRouter); ``configured`` is the provider named in config.yaml,
        or None when unset/unreadable.
    """
    log("=== Pre-Flight Provider Check ===")

    results = {}

    # Check OpenRouter — the only provider with a queryable balance.
    or_key = get_provider_api_key("openrouter")
    or_ok, or_msg, or_balance = check_openrouter_balance(or_key)
    results["openrouter"] = {"healthy": or_ok, "message": or_msg, "balance": or_balance}

    # Check Nous
    nous_key = get_provider_api_key("nous")
    nous_ok, nous_msg = check_nous_key(nous_key)
    results["nous"] = {"healthy": nous_ok, "message": nous_msg}

    # Check Anthropic
    anthropic_key = get_provider_api_key("anthropic")
    anthropic_ok, anthropic_msg = check_anthropic_key(anthropic_key)
    results["anthropic"] = {"healthy": anthropic_ok, "message": anthropic_msg}

    # Check Ollama (local daemon; no API key needed)
    ollama_ok, ollama_msg = check_ollama()
    results["ollama"] = {"healthy": ollama_ok, "message": ollama_msg}

    # Get configured provider
    configured = get_configured_provider()

    # Summary
    healthy_count = sum(1 for r in results.values() if r["healthy"])
    total_count = len(results)

    log("Results: " + str(healthy_count) + "/" + str(total_count) + " providers healthy")

    for provider, result in results.items():
        status = "HEALTHY" if result["healthy"] else "UNHEALTHY"
        extra = ""
        # Only the OpenRouter entry carries a balance figure.
        if provider == "openrouter" and result.get("balance") is not None:
            extra = " (balance: " + str(result["balance"]) + ")"

        log("  " + provider + ": " + status + " - " + result["message"] + extra)

    if configured:
        log("Configured provider: " + configured)
        if configured in results and not results[configured]["healthy"]:
            log("WARNING: Configured provider " + configured + " is UNHEALTHY!")

    return results, configured
|
||||
|
||||
def check_launch_readiness():
    """Decide whether sessions may launch, based on provider health.

    Returns ``(ready, reason)``. Launch is blocked when the configured
    provider is unhealthy, or when no provider at all is healthy.
    """
    results, configured = run_preflight_check()

    # The configured provider, if known, must itself be healthy.
    if configured and configured in results and not results[configured]["healthy"]:
        log("LAUNCH BLOCKED: Configured provider " + configured + " is unhealthy")
        return False, configured + " is unhealthy"

    # Otherwise any healthy provider is enough to proceed.
    healthy_providers = [name for name, info in results.items() if info["healthy"]]
    if not healthy_providers:
        log("LAUNCH BLOCKED: No healthy providers available")
        return False, "No healthy providers"

    log("LAUNCH READY: " + str(len(healthy_providers)) + " healthy providers available")
    return True, "Ready"
|
||||
|
||||
def show_balance():
    """Print the OpenRouter credit balance to stdout."""
    api_key = get_provider_api_key("openrouter")
    if not api_key:
        print("No OpenRouter API key found")
        return

    ok, msg, balance = check_openrouter_balance(api_key)

    # A healthy account with an unknown/unlimited balance, and every
    # failure case, both just echo the status message.
    if ok and balance is not None:
        print("OpenRouter balance: " + str(balance) + " credits")
    else:
        print("OpenRouter: " + msg)
|
||||
|
||||
def main():
    """Dispatch on command-line flags: --balance, --launch, or full check."""
    argv = sys.argv
    if "--balance" in argv:
        show_balance()
        return
    if "--launch" in argv:
        # --launch turns the readiness verdict into an exit code for
        # shell callers: 0 = ready, 1 = blocked.
        ready, message = check_launch_readiness()
        if not ready:
            print("BLOCKED: " + message)
            sys.exit(1)
        print("READY")
        sys.exit(0)
    run_preflight_check()


if __name__ == "__main__":
    main()
|
||||
@@ -1,411 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Provider Health Monitor Script
|
||||
Issue #509: [Robustness] Provider-aware profile config — auto-switch on failure
|
||||
|
||||
Monitors provider health and automatically switches profiles to working providers.
|
||||
|
||||
Usage:
|
||||
python3 provider-health-monitor.py # Run once
|
||||
python3 provider-health-monitor.py --daemon # Run continuously
|
||||
python3 provider-health-monitor.py --status # Show provider health
|
||||
"""
|
||||
|
||||
import os, sys, json, yaml, urllib.request, time
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
# Configuration.
# HERMES_HOME is overridable via the environment; defaults to ~/.hermes.
HERMES_HOME = Path(os.environ.get("HERMES_HOME", Path.home() / ".hermes"))
PROFILES_DIR = HERMES_HOME / "profiles"
# NOTE(review): fleet-health state and logs live under the user's home,
# not under HERMES_HOME — confirm this split is intentional.
LOG_DIR = Path.home() / ".local" / "timmy" / "fleet-health"
STATE_FILE = LOG_DIR / "tmux-state.json"
LOG_FILE = LOG_DIR / "provider-health.log"
|
||||
|
||||
# Provider test endpoints: each entry describes a cheap, read-only request
# that proves the credential works. "headers" is a callable so the API key
# can be injected at call time; "ollama" is a local daemon and needs no key.
PROVIDER_TESTS = {
    "openrouter": {
        "url": "https://openrouter.ai/api/v1/models",
        "method": "GET",
        "headers": lambda api_key: {"Authorization": "Bearer " + api_key},
        "timeout": 10
    },
    "anthropic": {
        "url": "https://api.anthropic.com/v1/models",
        "method": "GET",
        # Anthropic requires an explicit API version header.
        "headers": lambda api_key: {"x-api-key": api_key, "anthropic-version": "2023-06-01"},
        "timeout": 10
    },
    "nous": {
        "url": "https://inference.nousresearch.com/v1/models",
        "method": "GET",
        "headers": lambda api_key: {"Authorization": "Bearer " + api_key},
        "timeout": 10
    },
    "kimi-coding": {
        "url": "https://api.kimi.com/coding/v1/models",
        "method": "GET",
        "headers": lambda api_key: {"x-api-key": api_key, "x-api-provider": "kimi-coding"},
        "timeout": 10
    },
    "ollama": {
        "url": "http://localhost:11434/api/tags",
        "method": "GET",
        "headers": lambda api_key: {},
        # Local socket: a shorter timeout is enough.
        "timeout": 5
    }
}
|
||||
|
||||
def log(msg):
    """Append *msg* to the provider-health log file and echo it to stdout.

    The entry is prefixed with a UTC timestamp. Console echo is suppressed
    when ``--quiet`` appears anywhere on the command line.
    """
    timestamp = datetime.now(timezone.utc).strftime("%Y-%m-%d %H:%M:%S")
    log_entry = "[" + timestamp + "] " + msg

    LOG_DIR.mkdir(parents=True, exist_ok=True)
    # Explicit encoding: the platform default is locale-dependent and can
    # fail on non-ASCII provider error messages.
    with open(LOG_FILE, "a", encoding="utf-8") as f:
        f.write(log_entry + "\n")

    if "--quiet" not in sys.argv:
        print(log_entry)
|
||||
|
||||
def get_provider_api_key(provider):
    """Get API key for a provider from .env or environment."""
    wanted = provider.upper() + "_API_KEY"
    env_path = HERMES_HOME / ".env"

    if env_path.exists():
        with open(env_path) as fh:
            for raw_line in fh:
                candidate = raw_line.strip()
                if candidate.startswith(wanted + "="):
                    # Strip surrounding quotes a dotenv file may carry.
                    return candidate.split("=", 1)[1].strip().strip("'\"")

    return os.environ.get(wanted)
|
||||
|
||||
def test_provider(provider, api_key=None):
    """Test if a provider is healthy.

    Looks up the provider's probe request in PROVIDER_TESTS and issues it.
    Returns ``(healthy, message)``. A 429 counts as healthy: the
    credential is accepted, we are merely rate limited.
    """
    config = PROVIDER_TESTS.get(provider)
    if not config:
        return False, "Unknown provider: " + provider

    headers = config["headers"](api_key or "")

    try:
        req = urllib.request.Request(
            config["url"],
            headers=headers,
            method=config["method"]
        )
        # Context manager closes the HTTP response; the original leaked it.
        with urllib.request.urlopen(req, timeout=config["timeout"]) as resp:
            status = resp.status

        if status == 200:
            return True, "Healthy"
        return False, "HTTP " + str(status)

    except urllib.error.HTTPError as e:
        if e.code == 401:
            return False, "Unauthorized (401)"
        elif e.code == 403:
            return False, "Forbidden (403)"
        elif e.code == 429:
            # Rate limiting still proves the key works.
            return True, "Rate limited but accessible"
        else:
            return False, "HTTP " + str(e.code)
    except Exception as e:
        return False, str(e)[:100]
|
||||
|
||||
def _providers_from_config(config):
    """Collect provider names from one parsed config mapping."""
    found = set()
    if not isinstance(config, dict):
        return found

    # Primary model provider
    model_config = config.get("model", {})
    if isinstance(model_config, dict):
        provider = model_config.get("provider", "")
        if provider:
            found.add(provider)

    # Auxiliary providers ("auto" is a placeholder, not a real provider)
    auxiliary = config.get("auxiliary", {})
    if isinstance(auxiliary, dict):
        for aux_config in auxiliary.values():
            if isinstance(aux_config, dict):
                provider = aux_config.get("provider", "")
                if provider and provider != "auto":
                    found.add(provider)

    return found


def get_all_providers():
    """Get all providers from profiles and global config.

    Scans the global config.yaml plus every profile's config.yaml for
    ``model.provider`` and auxiliary providers, then adds a baseline set
    so the common providers are always probed.
    """
    providers = set()

    # Global config
    global_config = HERMES_HOME / "config.yaml"
    if global_config.exists():
        try:
            with open(global_config) as f:
                providers |= _providers_from_config(yaml.safe_load(f))
        except Exception:
            # Unreadable/invalid config contributes nothing; the original
            # bare `except:` also swallowed KeyboardInterrupt.
            pass

    # Profile configs
    if PROFILES_DIR.exists():
        for profile_dir in PROFILES_DIR.iterdir():
            if not profile_dir.is_dir():
                continue
            config_file = profile_dir / "config.yaml"
            if not config_file.exists():
                continue
            try:
                with open(config_file) as f:
                    providers |= _providers_from_config(yaml.safe_load(f))
            except Exception:
                pass

    # Add common providers even if not configured
    providers.update(["openrouter", "nous", "ollama"])

    return list(providers)
|
||||
|
||||
def build_health_map():
    """Probe every known provider and return a health map keyed by name."""
    providers = get_all_providers()
    health_map = {}

    log("Testing " + str(len(providers)) + " providers...")

    for name in providers:
        key = get_provider_api_key(name)
        healthy, message = test_provider(name, key)

        health_map[name] = {
            "healthy": healthy,
            "message": message,
            # Timestamp lets --status report how stale the data is.
            "last_test": datetime.now(timezone.utc).isoformat(),
            "api_key_present": bool(key)
        }

        verdict = "HEALTHY" if healthy else "UNHEALTHY"
        log("  " + name + ": " + verdict + " - " + message)

    return health_map
|
||||
|
||||
def get_fallback_providers(health_map):
    """Return healthy providers, preferred ones first.

    Priority: nous, openrouter, ollama, anthropic, kimi-coding; any other
    healthy provider follows in health-map iteration order.
    """
    priority_order = ["nous", "openrouter", "ollama", "anthropic", "kimi-coding"]

    def _is_healthy(name):
        return name in health_map and health_map[name]["healthy"]

    ranked = [name for name in priority_order if _is_healthy(name)]

    # Append remaining healthy providers not covered by the priority list.
    ranked += [
        name for name, info in health_map.items()
        if info["healthy"] and name not in ranked
    ]

    return ranked
|
||||
|
||||
def update_profile_config(profile_name, new_provider):
    """Update a profile's config to use a new provider.

    Rewrites ``model.provider`` and any auxiliary entry that pointed at
    the old provider. Returns ``(success, message)``.
    """
    config_file = PROFILES_DIR / profile_name / "config.yaml"

    if not config_file.exists():
        return False, "Config file not found"

    try:
        with open(config_file) as f:
            # `or {}` guards against an empty file: safe_load returns None
            # and the original then failed on `"model" not in config`.
            config = yaml.safe_load(f) or {}

        # Update model provider
        if "model" not in config:
            config["model"] = {}

        old_provider = config["model"].get("provider", "unknown")
        config["model"]["provider"] = new_provider

        # Keep auxiliary blocks in step with the primary provider switch.
        auxiliary = config.get("auxiliary", {})
        for aux_name, aux_config in auxiliary.items():
            if isinstance(aux_config, dict) and aux_config.get("provider") == old_provider:
                aux_config["provider"] = new_provider

        # Write back
        with open(config_file, "w") as f:
            yaml.dump(config, f, default_flow_style=False)

        log("Updated " + profile_name + ": " + old_provider + " -> " + new_provider)
        return True, "Updated"

    except Exception as e:
        return False, str(e)
|
||||
|
||||
def check_profiles(health_map):
    """Check all profiles and switch unhealthy providers to a fallback.

    Returns a list of ``{"profile", "old_provider", "new_provider"}``
    records for every profile that was rewritten. Always returns a list
    (the original returned None on its early-exit paths, which callers
    then had to treat as falsy).
    """
    updated_profiles = []

    if not PROFILES_DIR.exists():
        return updated_profiles

    fallback_providers = get_fallback_providers(health_map)
    if not fallback_providers:
        log("CRITICAL: No healthy providers available!")
        return updated_profiles

    for profile_dir in PROFILES_DIR.iterdir():
        if not profile_dir.is_dir():
            continue

        profile_name = profile_dir.name
        config_file = profile_dir / "config.yaml"

        if not config_file.exists():
            continue

        try:
            with open(config_file) as f:
                # Empty file -> None; normalize to a dict.
                config = yaml.safe_load(f) or {}

            model_config = config.get("model", {})
            if not isinstance(model_config, dict):
                continue

            current_provider = model_config.get("provider", "")
            if not current_provider:
                continue

            # Provider is healthy: nothing to do for this profile.
            if current_provider in health_map and health_map[current_provider]["healthy"]:
                continue

            # Pick the highest-priority fallback that differs from current.
            best_fallback = next(
                (p for p in fallback_providers if p != current_provider), None
            )

            if not best_fallback:
                log("No fallback for " + profile_name + " (current: " + current_provider + ")")
                continue

            # Update profile
            success, message = update_profile_config(profile_name, best_fallback)
            if success:
                updated_profiles.append({
                    "profile": profile_name,
                    "old_provider": current_provider,
                    "new_provider": best_fallback
                })

        except Exception as e:
            # One broken profile must not abort the whole sweep.
            log("Error processing " + profile_name + ": " + str(e))

    return updated_profiles
|
||||
|
||||
def load_state():
    """Load state from tmux-state.json; return {} if missing or corrupt."""
    if STATE_FILE.exists():
        try:
            with open(STATE_FILE) as f:
                return json.load(f)
        except (OSError, ValueError):
            # Corrupt/unreadable state starts fresh. The original bare
            # `except:` also swallowed KeyboardInterrupt and SystemExit.
            pass
    return {}
|
||||
|
||||
def save_state(state):
    """Persist *state* to tmux-state.json (pretty-printed JSON)."""
    # Make sure the directory exists before the first write.
    LOG_DIR.mkdir(parents=True, exist_ok=True)
    with open(STATE_FILE, "w") as fh:
        json.dump(state, fh, indent=2)
|
||||
|
||||
def run_once():
    """Run provider health check once: probe, repair profiles, save state."""
    log("=== Provider Health Check ===")

    state = load_state()

    # Probe providers, then rewrite any profiles stuck on unhealthy ones.
    health_map = build_health_map()
    updated_profiles = check_profiles(health_map)

    # Persist results so --status can report without re-probing.
    state["provider_health"] = health_map
    state["last_provider_check"] = datetime.now(timezone.utc).isoformat()
    if updated_profiles:
        state["last_profile_updates"] = updated_profiles
    save_state(state)

    # Summary
    healthy_count = sum(1 for entry in health_map.values() if entry["healthy"])
    total_count = len(health_map)
    log("Health: " + str(healthy_count) + "/" + str(total_count) + " providers healthy")

    if updated_profiles:
        log("Updated " + str(len(updated_profiles)) + " profiles:")
        for update in updated_profiles:
            log("  " + update["profile"] + ": " + update["old_provider"] + " -> " + update["new_provider"])
|
||||
|
||||
def show_status():
    """Print provider health and any recent profile updates from saved state."""
    state = load_state()
    health_map = state.get("provider_health", {})

    if not health_map:
        print("No provider health data available. Run without --status first.")
        return

    print("Provider Health (last updated: " + str(state.get("last_provider_check", "unknown")) + ")")
    print("=" * 80)

    for provider, info in sorted(health_map.items()):
        status = "HEALTHY" if info["healthy"] else "UNHEALTHY"
        message = info.get("message", "")
        api_key = "yes" if info.get("api_key_present") else "no"
        print(provider.ljust(20) + " " + status.ljust(10) + " API key: " + api_key + " - " + message)

    # Show recent updates
    recent = state.get("last_profile_updates", [])
    if recent:
        print()
        print("Recent Profile Updates:")
        for update in recent:
            print(" " + update["profile"] + ": " + update["old_provider"] + " -> " + update["new_provider"])
|
||||
|
||||
def daemon_mode():
    """Run health checks forever, every 5 minutes, until interrupted."""
    log("Starting provider health daemon (check every 300s)")
    while True:
        try:
            run_once()
            time.sleep(300)  # Check every 5 minutes
        except KeyboardInterrupt:
            log("Daemon stopped by user")
            break
        except Exception as e:
            # Keep the daemon alive on any failure; back off briefly.
            log("Error: " + str(e))
            time.sleep(60)
|
||||
|
||||
def main():
    """Dispatch on CLI flags: --status, --daemon, or a single check run."""
    flags = set(sys.argv)
    if "--status" in flags:
        show_status()
    elif "--daemon" in flags:
        daemon_mode()
    else:
        run_once()


if __name__ == "__main__":
    main()
|
||||
@@ -1,292 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Quality Gate — Validate pipeline outputs before saving.
|
||||
|
||||
Checks:
|
||||
- JSON schema validation for all output formats
|
||||
- Content quality (not empty, not duplicated, not toxic)
|
||||
- SOUL.md compliance for agent-facing content
|
||||
- Auto-reject bad outputs, re-queue for regeneration
|
||||
- Quality score tracking per pipeline
|
||||
|
||||
Usage:
|
||||
python3 quality-gate.py validate training-data/pairs.jsonl
|
||||
python3 quality-gate.py validate --format training-pairs data.jsonl
|
||||
python3 quality-gate.py score training-data/pairs.jsonl
|
||||
python3 quality-gate.py stats
|
||||
"""
|
||||
|
||||
import hashlib
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
from datetime import datetime, timezone
|
||||
from pathlib import Path
|
||||
|
||||
HERMES_HOME = Path(os.environ.get("HERMES_HOME", os.path.expanduser("~/.hermes")))
|
||||
SCORE_FILE = HERMES_HOME / "quality-scores.jsonl"
|
||||
HASH_FILE = HERMES_HOME / "quality-hashes.json"
|
||||
MAX_HASH_AGE_DAYS = 7
|
||||
|
||||
# ── Validators ─────────────────────────────────────────
|
||||
|
||||
# Phrases that immediately disqualify a training pair.
TOXIC_PATTERNS = [
    "kill yourself", "kys", "you should die", "end it all",
    "nobody loves you", "waste of life",
]


def validate_training_pair(entry):
    """Validate a training pair (prompt + response).

    Returns a list of human-readable error strings; empty list means valid.
    """
    if not isinstance(entry, dict):
        return ["Entry is not a dict"]

    problems = []
    prompt = entry.get("prompt", "") or entry.get("instruction", "") or ""
    response = entry.get("response", "") or entry.get("output", "") or entry.get("completion", "") or ""

    if not prompt.strip():
        problems.append("Empty prompt")
    if not response.strip():
        problems.append("Empty response")
    if len(response) < 10:
        problems.append(f"Response too short ({len(response)} chars)")
    if len(prompt) > 10000:
        problems.append(f"Prompt too long ({len(prompt)} chars)")

    # Toxicity check over both sides at once.
    haystack = (prompt + " " + response).lower()
    problems.extend(
        f"Toxic content detected: '{pattern}'"
        for pattern in TOXIC_PATTERNS
        if pattern in haystack
    )

    return problems
|
||||
|
||||
|
||||
def validate_jsonl(filepath):
    """Validate a JSONL file — each line must be valid JSON.

    Returns (errors, line_count, seen_hashes): error strings, the count of
    non-blank lines, and the 16-char sha256 prefixes of every line seen.
    """
    errors = []
    seen_hashes = set()
    line_count = 0

    try:
        with open(filepath) as fh:
            for lineno, raw in enumerate(fh, 1):
                raw = raw.strip()
                if not raw:
                    continue
                line_count += 1

                try:
                    record = json.loads(raw)
                except json.JSONDecodeError as exc:
                    errors.append(f"Line {lineno}: invalid JSON: {exc}")
                    continue

                # Duplicate detection via short content hash.
                digest = hashlib.sha256(raw.encode()).hexdigest()[:16]
                if digest in seen_hashes:
                    errors.append(f"Line {lineno}: duplicate content (hash {digest})")
                seen_hashes.add(digest)

                # Content validation applies to dict entries only.
                if isinstance(record, dict):
                    errors.extend(
                        f"Line {lineno}: {msg}" for msg in validate_training_pair(record)
                    )
    except Exception as exc:
        errors.append(f"File error: {exc}")

    return errors, line_count, seen_hashes
|
||||
|
||||
|
||||
def validate_json(filepath):
    """Validate a single JSON file; returns (errors, entry_count)."""
    errors = []
    try:
        with open(filepath) as fh:
            data = json.load(fh)
    except json.JSONDecodeError as exc:
        return [f"Invalid JSON: {exc}"], 0

    if isinstance(data, list):
        # Flag exact-duplicate dict entries by canonical-JSON hash.
        seen = set()
        for idx, item in enumerate(data):
            if isinstance(item, dict):
                digest = hashlib.sha256(json.dumps(item, sort_keys=True).encode()).hexdigest()[:16]
                if digest in seen:
                    errors.append(f"Index {idx}: duplicate entry")
                seen.add(digest)
        return errors, len(data)

    return errors, 1
|
||||
|
||||
|
||||
# ── Quality Scoring ────────────────────────────────────
|
||||
|
||||
def score_file(filepath):
    """Score a pipeline output file on a 0-100 scale."""
    path = Path(filepath)
    if not path.exists():
        return 0

    ext = path.suffix.lower()
    if ext == ".jsonl":
        errors, count, _ = validate_jsonl(filepath)
    elif ext == ".json":
        errors, count = validate_json(filepath)
    else:
        return 50  # unknown format gets a neutral score

    if count == 0:
        return 0

    # Base score is the inverse error rate, clamped at 0.
    score = max(0, int(100 * (1 - len(errors) / count)))

    # Bonus for having content
    if count >= 100:
        score = min(100, score + 5)

    return score
|
||||
|
||||
|
||||
def record_score(filepath, score):
    """Append a timestamped quality-score record to SCORE_FILE (JSONL).

    The trailing newline literal was garbled (split across physical lines)
    in the source; restored to an explicit "\n" so the file stays valid JSONL.
    """
    HERMES_HOME.mkdir(parents=True, exist_ok=True)
    entry = {
        "timestamp": datetime.now(timezone.utc).isoformat(),
        "file": str(filepath),
        "score": score,
    }
    with open(SCORE_FILE, "a") as f:
        f.write(json.dumps(entry) + "\n")
|
||||
|
||||
|
||||
# ── Dedup Hash Management ─────────────────────────────
|
||||
|
||||
def load_hashes():
    """Return the dedup-hash store, or a fresh empty structure on any failure."""
    try:
        with open(HASH_FILE) as fh:
            return json.load(fh)
    except Exception:
        # Missing or corrupt store is not fatal — start clean.
        return {"entries": {}, "last_cleanup": None}
|
||||
|
||||
|
||||
def save_hashes(data):
    """Write the dedup-hash store to disk, creating parent dirs as needed."""
    HASH_FILE.parent.mkdir(parents=True, exist_ok=True)
    with open(HASH_FILE, "w") as fh:
        fh.write(json.dumps(data, indent=2))
|
||||
|
||||
|
||||
def cleanup_old_hashes(data, max_age_days=MAX_HASH_AGE_DAYS):
    """Drop hash entries older than max_age_days; return how many were removed."""
    cutoff = datetime.now(timezone.utc).timestamp() - max_age_days * 86400
    entries = data.get("entries", {})
    before = len(entries)

    # Keep only entries whose timestamp is newer than the cutoff.
    kept = {key: info for key, info in entries.items() if info.get("ts", 0) > cutoff}
    data["entries"] = kept
    data["last_cleanup"] = datetime.now(timezone.utc).isoformat()
    return before - len(kept)
|
||||
|
||||
|
||||
# ── CLI ────────────────────────────────────────────────
|
||||
|
||||
def cmd_validate(args):
    """CLI: validate a file, record its score, and exit 1 on any errors.

    The blank-line-before-score literal was garbled (string split across
    physical lines) in the source; restored as f"\nScore: ...".
    """
    filepath = args[0] if args else None
    if not filepath or not os.path.exists(filepath):
        print(f"ERROR: {filepath} not found")
        sys.exit(1)

    suffix = Path(filepath).suffix.lower()
    if suffix == ".jsonl":
        errors, count, _ = validate_jsonl(filepath)
    elif suffix == ".json":
        errors, count = validate_json(filepath)
    else:
        print(f"Unsupported format: {suffix}")
        sys.exit(1)

    score = score_file(filepath)
    record_score(filepath, score)

    if errors:
        # Cap printed errors at 20 to keep output readable.
        for e in errors[:20]:
            print(f"FAIL: {e}")
        if len(errors) > 20:
            print(f"... and {len(errors)-20} more")
        print(f"\nScore: {score}/100 ({len(errors)} errors in {count} entries)")
        sys.exit(1)
    else:
        print(f"OK: {filepath} ({count} entries, score {score}/100)")
|
||||
|
||||
|
||||
def cmd_score(args):
    """CLI: print and record a quality score for a single file."""
    target = args[0] if args else None
    if not target:
        print("Usage: quality-gate.py score <file>")
        sys.exit(1)
    score = score_file(target)
    print(f"Score: {score}/100")
    record_score(target, score)
|
||||
|
||||
|
||||
def cmd_stats():
    """CLI: print average/latest quality scores grouped by file."""
    if not SCORE_FILE.exists():
        print("No quality scores recorded yet.")
        return

    # Best-effort read: skip unparseable lines rather than aborting.
    records = []
    with open(SCORE_FILE) as fh:
        for raw in fh:
            try:
                records.append(json.loads(raw))
            except Exception:
                continue

    if not records:
        print("No scores recorded.")
        return

    grouped = {}
    for rec in records:
        grouped.setdefault(rec.get("file", "?"), []).append(rec.get("score", 0))

    print("Quality Scores:")
    for fname, scs in sorted(grouped.items()):
        avg = sum(scs) / len(scs)
        latest = scs[-1]
        print(f" {fname}: avg={avg:.0f}, latest={latest}, runs={len(scs)}")
|
||||
|
||||
|
||||
def cmd_cleanup():
    """CLI: expire old dedup hashes and report how many were removed."""
    store = load_hashes()
    removed = cleanup_old_hashes(store)
    save_hashes(store)
    print(f"Cleaned up {removed} old hash entries (>{MAX_HASH_AGE_DAYS} days)")
|
||||
|
||||
|
||||
def main():
    """Dispatch CLI subcommands: validate | score | stats | cleanup."""
    if len(sys.argv) < 2:
        print("Usage: quality-gate.py <validate|score|stats|cleanup> [args]")
        sys.exit(1)

    cmd = sys.argv[1]
    args = sys.argv[2:]

    # Table-driven dispatch keeps command wiring in one place.
    handlers = {
        "validate": lambda: cmd_validate(args),
        "score": lambda: cmd_score(args),
        "stats": cmd_stats,
        "cleanup": cmd_cleanup,
    }
    handler = handlers.get(cmd)
    if handler is None:
        print(f"Unknown command: {cmd}")
        sys.exit(1)
    handler()


if __name__ == "__main__":
    main()
|
||||
@@ -1,4 +1,3 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
Soul Eval Gate — The Conscience of the Training Pipeline
|
||||
|
||||
|
||||
@@ -196,37 +196,7 @@
|
||||
"paused_reason": null,
|
||||
"skills": [],
|
||||
"skill": null
|
||||
},
|
||||
{
|
||||
"id": "tmux-supervisor-513",
|
||||
"name": "Autonomous Cron Supervisor",
|
||||
"prompt": "Load the tmux-supervisor skill and execute the monitoring protocol.\n\nCheck both `dev` and `timmy` tmux sessions for idle panes. Only send Telegram notifications on actionable events (idle, overflow, failure). Be silent when all agents are working.\n\nSteps:\n1. List all tmux sessions (skip 'Alexander')\n2. For each session, list windows and panes\n3. Capture each pane and classify state (idle vs active)\n4. For idle panes: read context, craft context-aware prompt\n5. Send /queue prompts to idle panes\n6. Verify prompts landed\n7. Only notify via Telegram if:\n - A pane was prompted (idle detected)\n - A pane shows context overflow (>80%)\n - A pane is stuck or crashed\n8. If all panes are active: respond with [SILENT]",
|
||||
"schedule": {
|
||||
"kind": "interval",
|
||||
"minutes": 7,
|
||||
"display": "every 7m"
|
||||
},
|
||||
"schedule_display": "every 7m",
|
||||
"repeat": {
|
||||
"times": null,
|
||||
"completed": 0
|
||||
},
|
||||
"enabled": true,
|
||||
"created_at": "2026-04-15T03:00:00.000000+00:00",
|
||||
"next_run_at": null,
|
||||
"last_run_at": null,
|
||||
"last_status": null,
|
||||
"last_error": null,
|
||||
"deliver": "telegram",
|
||||
"origin": null,
|
||||
"state": "scheduled",
|
||||
"paused_at": null,
|
||||
"paused_reason": null,
|
||||
"skills": [
|
||||
"tmux-supervisor"
|
||||
],
|
||||
"skill": "tmux-supervisor"
|
||||
}
|
||||
],
|
||||
"updated_at": "2026-04-13T02:00:00+00:00"
|
||||
}
|
||||
}
|
||||
|
||||
@@ -1,9 +0,0 @@
|
||||
- name: Nightly Pipeline Scheduler
|
||||
schedule: '*/30 18-23,0-8 * * *' # Every 30 min, off-peak hours only
|
||||
tasks:
|
||||
- name: Check and start pipelines
|
||||
shell: "bash scripts/nightly-pipeline-scheduler.sh"
|
||||
env:
|
||||
PIPELINE_TOKEN_LIMIT: "500000"
|
||||
PIPELINE_PEAK_START: "9"
|
||||
PIPELINE_PEAK_END: "18"
|
||||
@@ -1,419 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
quality_gate.py — Quality Gate for Pipeline Outputs
|
||||
|
||||
Validates all pipeline outputs before saving. Rejects bad outputs,
|
||||
tracks quality scores, and supports re-queue for regeneration.
|
||||
|
||||
Usage:
|
||||
python3 quality_gate.py --input output.jsonl --type training_pairs
|
||||
python3 quality_gate.py --input output.jsonl --type knowledge
|
||||
python3 quality_gate.py --input output.jsonl --type scene_descriptions
|
||||
python3 quality_gate.py --dir pipeline/output/ --type training_pairs
|
||||
python3 quality_gate.py --status # show quality stats
|
||||
|
||||
Exit codes:
|
||||
0 = all outputs passed
|
||||
1 = some outputs rejected
|
||||
2 = file/parse error
|
||||
"""
|
||||
|
||||
import json
|
||||
import os
|
||||
import sys
|
||||
import hashlib
|
||||
import re
|
||||
from pathlib import Path
|
||||
from datetime import datetime, timezone
|
||||
from dataclasses import dataclass, field, asdict
|
||||
from typing import List, Optional, Dict, Any
|
||||
|
||||
STATS_FILE = Path.home() / ".hermes" / "pipeline" / "quality_stats.json"
|
||||
|
||||
# --- Quality Check Types ---
|
||||
|
||||
@dataclass
class QualityResult:
    """Result of a quality check on a single entry."""

    passed: bool           # True when no checks failed
    checks_run: int        # total number of checks executed
    checks_failed: int     # how many of those checks failed
    score: float           # 0.0 (worst) .. 1.0 (best)
    reasons: List[str] = field(default_factory=list)  # failure descriptions
    entry_index: int = -1  # position in the source file; -1 if unknown
    hash: str = ""         # dedup hash of the entry

    def to_dict(self):
        """Return a plain-dict view suitable for JSON serialization."""
        return asdict(self)
|
||||
|
||||
|
||||
@dataclass
class GateReport:
    """Aggregate report from one quality-gate run over a file."""

    file: str
    type: str
    total: int
    passed: int
    rejected: int
    score: float  # average per-entry score, 0.0..1.0
    rejected_indices: List[int] = field(default_factory=list)
    timestamp: str = field(default_factory=lambda: datetime.now(timezone.utc).isoformat())

    def to_dict(self):
        """Return a plain-dict view suitable for JSON serialization."""
        return asdict(self)
|
||||
|
||||
|
||||
# ============================================================
|
||||
# Check Functions
|
||||
# ============================================================
|
||||
|
||||
def entry_hash(entry: dict) -> str:
    """Return a short (16 hex chars) key-order-independent hash for dedup."""
    canonical = json.dumps(entry, sort_keys=True, ensure_ascii=False)
    return hashlib.sha256(canonical.encode()).hexdigest()[:16]
|
||||
|
||||
|
||||
def check_not_empty(entry: dict, fields: List[str]) -> List[str]:
    """Return error strings for each required field that is missing or empty."""
    problems = []
    for name in fields:
        value = entry.get(name)
        if value is None:
            problems.append(f"missing_field: {name}")
        elif isinstance(value, str) and not value.strip():
            problems.append(f"empty_field: {name}")
        elif isinstance(value, list) and not value:
            problems.append(f"empty_list: {name}")
    return problems
|
||||
|
||||
|
||||
def check_string_min_length(entry: dict, field_lengths: Dict[str, int]) -> List[str]:
    """Return error strings for string fields shorter than their minimum length."""
    return [
        f"short_field: {name} ({len(entry[name])} < {min_len})"
        for name, min_len in field_lengths.items()
        if isinstance(entry.get(name), str) and len(entry[name]) < min_len
    ]
|
||||
|
||||
|
||||
def check_no_duplicates(entries: List[dict], key_fields: List[str]) -> Dict[int, List[str]]:
    """Map entry index -> errors for entries whose key fields repeat an earlier one."""
    first_seen: Dict[str, int] = {}
    dupes: Dict[int, List[str]] = {}
    for idx, entry in enumerate(entries):
        key = str(tuple(entry.get(f, "") for f in key_fields))
        if key in first_seen:
            dupes[idx] = [f"duplicate_of_index: {first_seen[key]}"]
        else:
            first_seen[key] = idx
    return dupes
|
||||
|
||||
|
||||
def check_training_pair(entry: dict) -> List[str]:
    """Validate a training pair (prompt/response)."""
    problems = check_not_empty(entry, ["prompt", "response"])

    prompt = entry.get("prompt", "")
    response = entry.get("response", "")

    # Reject responses that merely echo the prompt back.
    if prompt and response and prompt.strip() == response.strip():
        problems.append("response_equals_prompt")

    # Reject responses with no real substance.
    if isinstance(response, str) and len(response) < 10:
        problems.append(f"response_too_short: {len(response)} chars")

    return problems
|
||||
|
||||
|
||||
def check_scene_description(entry: dict) -> List[str]:
    """Validate a scene description entry."""
    problems = check_not_empty(entry, ["song", "beat", "lyric_line", "scene"])

    scene = entry.get("scene")
    if isinstance(scene, dict):
        problems.extend(check_not_empty(scene, ["mood", "colors", "composition", "camera", "description"]))
        problems.extend(check_string_min_length(scene, {"description": 10}))

        # Palettes larger than 5 colors are rejected.
        colors = scene.get("colors", [])
        if isinstance(colors, list) and len(colors) > 5:
            problems.append(f"too_many_colors: {len(colors)} > 5")

    return problems
|
||||
|
||||
|
||||
def check_knowledge_entry(entry: dict) -> List[str]:
    """Validate a knowledge file entry."""
    problems = check_not_empty(entry, ["title", "content"])

    # Reject boilerplate markers that slipped through generation.
    content = entry.get("content", "")
    if isinstance(content, str):
        lowered = content.lower()
        for marker in ["TODO", "FIXME", "PLACEHOLDER", "[INSERT", "lorem ipsum"]:
            if marker.lower() in lowered:
                problems.append(f"placeholder_content: '{marker}' found")

    problems.extend(check_string_min_length(entry, {"content": 50}))
    return problems
|
||||
|
||||
|
||||
def check_prompt_enhancement(entry: dict) -> List[str]:
    """Validate a prompt enhancement pair (terse/rich)."""
    problems = check_not_empty(entry, ["terse", "rich"])

    terse = entry.get("terse", "")
    rich = entry.get("rich", "")

    # The enhanced prompt must actually add length over the terse one.
    if isinstance(terse, str) and isinstance(rich, str) and len(rich) <= len(terse):
        problems.append("rich_not_longer_than_terse")

    problems.extend(check_string_min_length(entry, {"rich": 20}))
    return problems
|
||||
|
||||
|
||||
def check_adversary_entry(entry: dict) -> List[str]:
    """Validate an adversary corpus entry."""
    problems = check_not_empty(entry, ["id", "family", "prompt"])

    # The attack prompt must have minimal substance.
    attack_prompt = entry.get("prompt", "")
    if isinstance(attack_prompt, str) and len(attack_prompt) < 10:
        problems.append(f"prompt_too_short: {len(attack_prompt)} chars")

    return problems
|
||||
|
||||
|
||||
def check_soul_compliance(text: str) -> List[str]:
    """Run basic SOUL.md compliance checks against free text."""
    lowered = text.lower() if text else ""

    # Each (regex, message) pair flags content that violates core values.
    rules = [
        ("compute.*value.*human.*life", "soul_violation: computing human life value"),
        ("sexualiz.*child", "soul_violation: sexualizing children"),
        ("create.*weapon.*kill", "soul_violation: weapon creation"),
        ("enslav", "soul_violation: enslavement content"),
    ]
    return [message for pattern, message in rules if re.search(pattern, lowered)]
|
||||
|
||||
|
||||
# ============================================================
|
||||
# Gate Runners
|
||||
# ============================================================
|
||||
|
||||
# Dispatch table: entry type (singular and plural aliases) -> validator function.
CHECK_MAP = {
    "training_pairs": check_training_pair,
    "training_pair": check_training_pair,
    "scene_descriptions": check_scene_description,
    "scene_description": check_scene_description,
    "knowledge": check_knowledge_entry,
    "prompt_enhancement": check_prompt_enhancement,
    "adversary": check_adversary_entry,
    "adversary_corpus": check_adversary_entry,
}
|
||||
|
||||
|
||||
def run_gate(input_path: str, entry_type: str) -> GateReport:
    """Run the quality gate on a JSONL file and persist the resulting stats."""
    path = Path(input_path)
    if not path.exists():
        return GateReport(file=str(path), type=entry_type, total=0, passed=0, rejected=0, score=0.0)

    check_fn = CHECK_MAP.get(entry_type)
    if not check_fn:
        # Unknown type is flagged with a sentinel rejected index of -1.
        return GateReport(file=str(path), type=entry_type, total=0, passed=0, rejected=0, score=0.0,
                          rejected_indices=[-1])

    with open(path) as fh:
        entries = [json.loads(raw) for raw in fh if raw.strip()]

    # Cross-entry duplicate detection.
    dup_errors = check_no_duplicates(entries, _get_key_fields(entry_type))

    passed = 0
    rejected = 0
    rejected_indices = []
    total_score = 0.0

    for idx, entry in enumerate(entries):
        problems = check_fn(entry)
        problems.extend(dup_errors.get(idx, []))

        # Gather the entry's free-text fields for SOUL compliance checking.
        text_blob = ""
        for key in ["response", "rich", "description", "content", "lyric_line"]:
            value = entry.get(key)
            if isinstance(value, str):
                text_blob += value + " "
        if isinstance(entry.get("scene"), dict):
            text_blob += entry["scene"].get("description", "")
        problems.extend(check_soul_compliance(text_blob))

        if problems:
            rejected += 1
            rejected_indices.append(idx)
        else:
            passed += 1

        # Score: 1.0 if no errors, minus 0.2 per error, floored at 0.
        total_score += max(0.0, 1.0 - len(problems) * 0.2)

    avg_score = total_score / len(entries) if entries else 0.0

    report = GateReport(
        file=str(path),
        type=entry_type,
        total=len(entries),
        passed=passed,
        rejected=rejected,
        score=round(avg_score, 3),
        rejected_indices=rejected_indices[:50],  # limit for readability
    )

    # Save stats
    _save_stats(report)
    return report
|
||||
|
||||
|
||||
def _get_key_fields(entry_type: str) -> List[str]:
|
||||
"""Get key fields for deduplication based on entry type."""
|
||||
key_map = {
|
||||
"training_pairs": ["prompt", "response"],
|
||||
"training_pair": ["prompt", "response"],
|
||||
"scene_descriptions": ["song", "beat"],
|
||||
"scene_description": ["song", "beat"],
|
||||
"knowledge": ["title"],
|
||||
"prompt_enhancement": ["terse", "rich"],
|
||||
"adversary": ["id", "prompt"],
|
||||
"adversary_corpus": ["id", "prompt"],
|
||||
}
|
||||
return key_map.get(entry_type, ["id"])
|
||||
|
||||
|
||||
def _save_stats(report: GateReport):
    """Append a report to the rolling stats file (keeps last 1000 runs)."""
    STATS_FILE.parent.mkdir(parents=True, exist_ok=True)

    history = []
    if STATS_FILE.exists():
        try:
            with open(STATS_FILE) as fh:
                history = json.load(fh)
        except (json.JSONDecodeError, IOError):
            history = []  # corrupt stats are discarded, not fatal

    history.append(report.to_dict())
    history = history[-1000:]  # bound file growth

    with open(STATS_FILE, "w") as fh:
        json.dump(history, fh, indent=2)
|
||||
|
||||
|
||||
def show_status():
    """Show quality gate statistics grouped by entry type.

    Fix: removed the unused local `total_passed`, which was computed
    but never printed.
    """
    if not STATS_FILE.exists():
        print("No quality stats found.")
        return

    with open(STATS_FILE) as f:
        stats = json.load(f)

    print(f"\nQuality Gate Stats — {len(stats)} runs")
    print()

    # Group by type
    by_type = {}
    for s in stats:
        by_type.setdefault(s.get("type", "unknown"), []).append(s)

    for t, runs in sorted(by_type.items()):
        total_entries = sum(r.get("total", 0) for r in runs)
        total_rejected = sum(r.get("rejected", 0) for r in runs)
        avg_score = sum(r.get("score", 0) for r in runs) / len(runs) if runs else 0
        print(f" {t:25} {len(runs):4} runs | {total_entries:6} entries | {total_rejected:4} rejected | avg score: {avg_score:.3f}")
|
||||
|
||||
|
||||
def main():
    """CLI entry point: gate one file, a directory of files, or show stats."""
    import argparse
    parser = argparse.ArgumentParser(description="Quality Gate for Pipeline Outputs")
    parser.add_argument("--input", default=None, help="Input JSONL file")
    parser.add_argument("--type", default=None, help="Entry type (training_pairs, scene_descriptions, knowledge, etc.)")
    parser.add_argument("--dir", default=None, help="Process all JSONL files in directory")
    parser.add_argument("--status", action="store_true", help="Show quality stats")
    args = parser.parse_args()

    if args.status:
        show_status()
        return

    if args.dir:
        # Gate every JSONL file in the directory, inferring type per file.
        for candidate in sorted(Path(args.dir).glob("*.jsonl")):
            report = run_gate(str(candidate), args.type or _infer_type(candidate.name))
            _print_report(report)
    elif args.input:
        report = run_gate(args.input, args.type or _infer_type(args.input))
        _print_report(report)
        sys.exit(0 if report.rejected == 0 else 1)
    else:
        parser.print_help()
|
||||
|
||||
|
||||
def _infer_type(filename: str) -> str:
|
||||
"""Infer entry type from filename."""
|
||||
name = filename.lower()
|
||||
if "scene" in name:
|
||||
return "scene_descriptions"
|
||||
if "training" in name or "pair" in name:
|
||||
return "training_pairs"
|
||||
if "knowledge" in name:
|
||||
return "knowledge"
|
||||
if "adversary" in name or "attack" in name:
|
||||
return "adversary"
|
||||
if "prompt" in name or "enhance" in name:
|
||||
return "prompt_enhancement"
|
||||
return "training_pairs" # default
|
||||
|
||||
|
||||
def _print_report(report: GateReport):
    """Print a one-line human-readable summary of a gate report."""
    if report.rejected == 0:
        status = "PASS"
    else:
        status = f"FAIL ({report.rejected} rejected)"
    print(f" {report.file}: {status} | {report.passed}/{report.total} passed | score: {report.score:.3f}")


if __name__ == "__main__":
    main()
|
||||
@@ -1,4 +1,3 @@
|
||||
#!/usr/bin/env python3
|
||||
import json
|
||||
from hermes_tools import browser_navigate, browser_vision
|
||||
|
||||
|
||||
@@ -1,4 +1,3 @@
|
||||
#!/usr/bin/env python3
|
||||
import json
|
||||
from hermes_tools import browser_navigate, browser_vision
|
||||
|
||||
|
||||
@@ -1,50 +0,0 @@
|
||||
# Nightly Pipeline Scheduler
|
||||
|
||||
Auto-starts batch pipelines when inference is available.
|
||||
|
||||
## What It Does
|
||||
|
||||
1. Checks inference provider health (OpenRouter, Ollama, RunPod)
|
||||
2. Checks if it's off-peak hours (configurable, default: after 6PM)
|
||||
3. Checks interactive session load (don't fight with live users)
|
||||
4. Checks daily token budget (configurable limit)
|
||||
5. Starts the highest-priority incomplete pipeline
|
||||
|
||||
## Pipeline Priority Order
|
||||
|
||||
| Priority | Pipeline | Deps | Max Tokens |
|
||||
|----------|----------|------|------------|
|
||||
| 1 | playground-factory | none | 100,000 |
|
||||
| 2 | training-factory | none | 150,000 |
|
||||
| 3 | knowledge-mine | training-factory running | 80,000 |
|
||||
| 4 | adversary | knowledge-mine running | 50,000 |
|
||||
| 5 | codebase-genome | none | 120,000 |
|
||||
|
||||
## Usage
|
||||
|
||||
```bash
|
||||
# Normal run (used by cron)
|
||||
./scripts/nightly-pipeline-scheduler.sh
|
||||
|
||||
# Dry run (show what would start)
|
||||
./scripts/nightly-pipeline-scheduler.sh --dry-run
|
||||
|
||||
# Status report
|
||||
./scripts/nightly-pipeline-scheduler.sh --status
|
||||
|
||||
# Force start during peak hours
|
||||
./scripts/nightly-pipeline-scheduler.sh --force
|
||||
```
|
||||
|
||||
## Configuration
|
||||
|
||||
Set via environment variables:
|
||||
- `PIPELINE_TOKEN_LIMIT`: Daily token budget (default: 500,000)
|
||||
- `PIPELINE_PEAK_START`: Peak hours start (default: 9)
|
||||
- `PIPELINE_PEAK_END`: Peak hours end (default: 18)
|
||||
- `HERMES_HOME`: Hermes home directory (default: ~/.hermes)
|
||||
|
||||
## Cron
|
||||
|
||||
Runs every 30 minutes. Off-peak only (unless --force).
|
||||
See `cron/pipeline-scheduler.yml`.
|
||||
@@ -1,383 +0,0 @@
|
||||
#!/usr/bin/env bash
|
||||
# nightly-pipeline-scheduler.sh — Auto-start batch pipelines when inference is available.
|
||||
#
|
||||
# Checks provider health, pipeline progress, token budget, and interactive load.
|
||||
# Starts the highest-priority incomplete pipeline that can run.
|
||||
#
|
||||
# Usage:
|
||||
# ./scripts/nightly-pipeline-scheduler.sh # Normal run
|
||||
# ./scripts/nightly-pipeline-scheduler.sh --dry-run # Show what would start
|
||||
# ./scripts/nightly-pipeline-scheduler.sh --status # Pipeline status report
|
||||
|
||||
set -euo pipefail
|
||||
|
||||
# --- Configuration ---
|
||||
HERMES_HOME="${HERMES_HOME:-$HOME/.hermes}"
|
||||
BUDGET_FILE="${HERMES_HOME}/pipeline_budget.json"
|
||||
STATE_FILE="${HERMES_HOME}/pipeline_state.json"
|
||||
LOG_FILE="${HERMES_HOME}/logs/pipeline-scheduler.log"
|
||||
TOKEN_DAILY_LIMIT="${PIPELINE_TOKEN_LIMIT:-500000}"
|
||||
PEAK_HOURS_START="${PIPELINE_PEAK_START:-9}"
|
||||
PEAK_HOURS_END="${PIPELINE_PEAK_END:-18}"
|
||||
|
||||
# Pipeline definitions (priority order)
|
||||
# Each pipeline: name, script, max_tokens, dependencies
|
||||
PIPELINES=(
|
||||
"playground-factory|scripts/pipeline_playground_factory.sh|100000|none"
|
||||
"training-factory|scripts/pipeline_training_factory.sh|150000|none"
|
||||
"knowledge-mine|scripts/pipeline_knowledge_mine.sh|80000|training-factory"
|
||||
"adversary|scripts/pipeline_adversary.sh|50000|knowledge-mine"
|
||||
"codebase-genome|scripts/pipeline_codebase_genome.sh|120000|none"
|
||||
)
|
||||
|
||||
# --- Colors ---
|
||||
RED='\033[0;31m'
|
||||
GREEN='\033[0;32m'
|
||||
YELLOW='\033[0;33m'
|
||||
CYAN='\033[0;36m'
|
||||
NC='\033[0m'
|
||||
|
||||
# --- Helpers ---
|
||||
# Current hour of day, without a leading zero.
now_hour() { date +%-H; }

# Exit 0 when the current hour is inside [PEAK_HOURS_START, PEAK_HOURS_END).
is_peak_hours() {
    local h
    h=$(now_hour)
    [[ $h -ge $PEAK_HOURS_START && $h -lt $PEAK_HOURS_END ]]
}

# Create the directories backing the log, budget, and state files.
ensure_dirs() {
    mkdir -p "$(dirname "$LOG_FILE")" "$(dirname "$BUDGET_FILE")" "$(dirname "$STATE_FILE")"
}

# Timestamped logging to stdout and the log file.
log() { echo "[$(date '+%Y-%m-%d %H:%M:%S')] $*" | tee -a "$LOG_FILE"; }
|
||||
|
||||
# Print the number of tokens already consumed today (0 if no budget file,
# or if the embedded Python lookup fails for any reason).
get_budget_used_today() {
    if [[ -f "$BUDGET_FILE" ]]; then
        local today=$(date +%Y-%m-%d)
        # Embedded Python reads d['daily'][<today>]['tokens_used'];
        # NOTE(review): "$BUDGET_FILE"/"$today" are interpolated into the
        # Python source text — assumes they contain no quotes.
        python3 -c "
import json, sys
with open('$BUDGET_FILE') as f:
    d = json.load(f)
print(d.get('daily', {}).get('$today', {}).get('tokens_used', 0))
" 2>/dev/null || echo 0
    else
        echo 0
    fi
}
|
||||
|
||||
# Tokens still available today under TOKEN_DAILY_LIMIT.
get_budget_remaining() {
    local spent
    spent=$(get_budget_used_today)
    echo $((TOKEN_DAILY_LIMIT - spent))
}
|
||||
|
||||
# Record token spend for a pipeline against today's budget.
# $1 = pipeline name, $2 = tokens consumed.
# NOTE(review): shell variables are interpolated directly into the Python
# source — assumes pipeline names and token counts contain no quotes or
# code; verify callers only pass the fixed names from PIPELINES.
update_budget() {
    local pipeline="$1"
    local tokens="$2"
    local today=$(date +%Y-%m-%d)
    python3 -c "
import json, os
path = '$BUDGET_FILE'
d = {}
if os.path.exists(path):
    with open(path) as f:
        d = json.load(f)
daily = d.setdefault('daily', {})
day = daily.setdefault('$today', {'tokens_used': 0, 'pipelines': {}})
day['tokens_used'] = day.get('tokens_used', 0) + $tokens
day['pipelines']['$pipeline'] = day['pipelines'].get('$pipeline', 0) + $tokens
with open(path, 'w') as f:
    json.dump(d, f, indent=2)
"
}
|
||||
|
||||
# Print the scheduler's state JSON, or an empty JSON object when the
# state file does not exist yet.
get_pipeline_state() {
    local state_path="$STATE_FILE"
    if [[ ! -f "$state_path" ]]; then
        echo "{}"
        return
    fi
    cat "$state_path"
}
|
||||
|
||||
# Persist pipeline $1's lifecycle state $2 (running|complete|failed|skipped)
# together with an ISO-8601 timestamp into STATE_FILE.
# Note: this overwrites the pipeline's whole entry (any recorded PID is
# dropped), matching the original behavior.
#
# Fix: values travel to Python via argv instead of being interpolated
# into the program text (quote-safe, injection-safe).
set_pipeline_state() {
    local pipeline="$1"
    local state="$2"
    local stamp
    stamp=$(date -Iseconds)
    python3 -c '
import json, os, sys
path, pipeline, state, stamp = sys.argv[1:5]
d = {}
if os.path.exists(path):
    with open(path) as f:
        d = json.load(f)
d[pipeline] = {"state": state, "updated": stamp}
with open(path, "w") as f:
    json.dump(d, f, indent=2)
' "$STATE_FILE" "$pipeline" "$state" "$stamp"
}
|
||||
|
||||
# Print "true" when pipeline $1 is in state "complete", else "false".
# Missing/unreadable state file also prints "false".
#
# Fix: path and pipeline name are passed via argv rather than being
# interpolated into the Python source (quote-safe, injection-safe).
is_pipeline_complete() {
    local pipeline="$1"
    python3 -c '
import json, os, sys
path, pipeline = sys.argv[1], sys.argv[2]
state = "not_started"
if os.path.exists(path):
    with open(path) as f:
        d = json.load(f)
    state = d.get(pipeline, {}).get("state", "not_started")
print("true" if state == "complete" else "false")
' "$STATE_FILE" "$pipeline" 2>/dev/null || echo false
}
|
||||
|
||||
# Print "true" when pipeline $1 is in state "running", else "false".
# Missing/unreadable state file also prints "false".
#
# Fix: path and pipeline name are passed via argv rather than being
# interpolated into the Python source (quote-safe, injection-safe).
is_pipeline_running() {
    local pipeline="$1"
    python3 -c '
import json, os, sys
path, pipeline = sys.argv[1], sys.argv[2]
state = "not_started"
if os.path.exists(path):
    with open(path) as f:
        d = json.load(f)
    state = d.get(pipeline, {}).get("state", "not_started")
print("true" if state == "running" else "false")
' "$STATE_FILE" "$pipeline" 2>/dev/null || echo false
}
|
||||
|
||||
# Return 0 when dependency $1 is satisfied, 1 otherwise.
#   - "none" is always satisfied.
#   - The known upstream pipelines (training-factory, knowledge-mine)
#     must be in state "running" or "complete".
#   - Any other dependency name is treated as satisfied, matching the
#     original fall-through behavior.
#
# Fix: the original duplicated an identical ~20-line state query for
# each known dependency; collapsed to one parameterized query with the
# path and name passed via argv (quote-safe, injection-safe).
#
# NOTE(review): for adversary the stated intent was "knowledge-mine at
# least 50% done"; without progress tracking we only require that it has
# started (running) or finished (complete).
check_dependency() {
    local dep="$1"
    if [[ "$dep" == "none" ]]; then
        return 0
    fi
    if [[ "$dep" == "training-factory" || "$dep" == "knowledge-mine" ]]; then
        local state
        state=$(python3 -c '
import json, os, sys
path, dep = sys.argv[1], sys.argv[2]
state = "not_started"
if os.path.exists(path):
    with open(path) as f:
        d = json.load(f)
    state = d.get(dep, {}).get("state", "not_started")
print(state)
' "$STATE_FILE" "$dep" 2>/dev/null || echo "not_started")
        [[ "$state" == "running" || "$state" == "complete" ]]
        return $?
    fi
    return 0
}
|
||||
|
||||
# Probe the known inference providers; succeed (exit 0) when any one of
# them answers HTTP 200, fail (exit 1) when none do.
check_inference_available() {
    local endpoints=(
        "https://openrouter.ai/api/v1/models"  # OpenRouter
        "http://localhost:11434/api/tags"      # local Ollama
    )
    # RunPod is only probed when an endpoint has been configured.
    if [[ -n "${RUNPOD_ENDPOINT:-}" ]]; then
        endpoints+=("$RUNPOD_ENDPOINT/health")
    fi

    local url status
    for url in "${endpoints[@]}"; do
        status=$(curl -s -o /dev/null -w "%{http_code}" \
            --connect-timeout 5 "$url" 2>/dev/null || echo "000")
        if [[ "$status" == "200" ]]; then
            return 0
        fi
    done
    return 1
}
|
||||
|
||||
# Return 0 when it is OK to start a pipeline, 1 when interactive usage
# is high (don't fight with live users). Counts tmux panes whose
# current command matches hermes or python3.
#
# Bug fix: the original used `grep -c ... || echo 0`. grep -c prints 0
# AND exits non-zero when nothing matches, so $active became "0\n0" and
# the numeric [[ -gt ]] comparison below errored out. Defaulting with
# `|| active=0` keeps the variable a single number in every case.
check_interactive_load() {
    local active
    active=$(tmux list-panes -a -F '#{pane_pid} #{pane_current_command}' 2>/dev/null \
        | grep -c "hermes\|python3") || active=0

    # If more than 3 interactive sessions, skip pipeline start
    if [[ $active -gt 3 ]]; then
        return 1
    fi
    return 0
}
|
||||
|
||||
# Launch pipeline $1 in the background.
#   $1 name, $2 script path, $3 max token allowance for the run,
#   $4 remaining daily budget, $5 mode ("run" default, or "dry-run").
# Returns 0 on successful start (or dry-run), 1 when skipped or when
# the script exits immediately after launch.
start_pipeline() {
    local name="$1"
    local script="$2"
    local max_tokens="$3"
    local budget_remaining="$4"
    local mode="${5:-run}"

    if [[ "$budget_remaining" -lt "$max_tokens" ]]; then
        log "SKIP $name: insufficient budget ($budget_remaining < $max_tokens tokens)"
        return 1
    fi

    if [[ ! -f "$script" ]]; then
        log "SKIP $name: script not found ($script)"
        return 1
    fi

    if [[ "$mode" == "dry-run" ]]; then
        log "DRY-RUN: Would start $name (budget: $budget_remaining, needs: $max_tokens)"
        return 0
    fi

    log "START $name (budget: $budget_remaining, max_tokens: $max_tokens)"
    set_pipeline_state "$name" "running"

    # Run in background, capture output
    local log_path="${HERMES_HOME}/logs/pipeline-${name}.log"
    bash "$script" --max-tokens "$max_tokens" >> "$log_path" 2>&1 &
    local pid=$!

    # Wait a moment to check if it started OK
    sleep 2
    if kill -0 $pid 2>/dev/null; then
        log "RUNNING $name (PID: $pid, log: $log_path)"
        # Record the PID.
        # Fix 1: the original did d['$name']['pid'] = $pid, which raises
        #        KeyError when the entry is missing (e.g. if the earlier
        #        set_pipeline_state write failed); setdefault guards that.
        # Fix 2: values passed via argv, not interpolated into the
        #        Python source (quote-safe, injection-safe).
        python3 -c '
import json, os, sys
path, name, pid = sys.argv[1], sys.argv[2], int(sys.argv[3])
d = {}
if os.path.exists(path):
    with open(path) as f:
        d = json.load(f)
d.setdefault(name, {})["pid"] = pid
with open(path, "w") as f:
    json.dump(d, f, indent=2)
' "$STATE_FILE" "$name" "$pid"
        return 0
    else
        log "FAIL $name: script exited immediately"
        set_pipeline_state "$name" "failed"
        return 1
    fi
}
|
||||
|
||||
# --- Main ---

# Scheduler entry point. $1 selects the mode:
#   (default "run")  normal scheduling pass
#   "dry-run"        log what would start, start nothing
#   "--force"        bypass the peak-hours and interactive-load gates
#   "--status"       print a per-pipeline status report and exit
# Runs a series of gates (inference reachability, peak hours,
# interactive load, token budget), then starts at most one pipeline per
# pass (the two factories may run in parallel).
main() {
    local mode="${1:-run}"
    ensure_dirs

    log "=== Pipeline Scheduler ($mode) ==="

    # Check 1: Is inference available? No provider -> nothing can run.
    if ! check_inference_available; then
        log "No inference provider available. Skipping all pipelines."
        exit 0
    fi
    log "Inference: AVAILABLE"

    # Check 2: Is it peak hours? (--force bypasses this gate)
    if is_peak_hours && [[ "$mode" != "--force" ]]; then
        local h=$(now_hour)
        log "Peak hours ($h:00). Skipping pipeline start. Use --force to override."
        exit 0
    fi
    log "Off-peak: OK"

    # Check 3: Interactive load (--force bypasses this gate too)
    if ! check_interactive_load && [[ "$mode" != "--force" ]]; then
        log "High interactive load. Skipping pipeline start."
        exit 0
    fi
    log "Interactive load: OK"

    # Check 4: Token budget
    local budget=$(get_budget_remaining)
    log "Token budget remaining: $budget / $TOKEN_DAILY_LIMIT"

    # NOTE(review): an exhausted budget exits before the --status branch
    # below can report — confirm that is the intended ordering.
    if [[ $budget -le 0 ]]; then
        log "Daily token budget exhausted. Stopping."
        exit 0
    fi

    # Check 5: Pipeline status report mode — print and exit.
    if [[ "$mode" == "--status" ]]; then
        echo -e "${CYAN}Pipeline Status:${NC}"
        echo "────────────────────────────────────────────────────"
        for entry in "${PIPELINES[@]}"; do
            IFS='|' read -r name script max_tokens dep <<< "$entry"
            local state=$(python3 -c "
import json, os
path = '$STATE_FILE'
if not os.path.exists(path):
    print('not_started')
else:
    with open(path) as f:
        d = json.load(f)
    print(d.get('$name', {}).get('state', 'not_started'))
" 2>/dev/null || echo "not_started")

            # Colorize by lifecycle state.
            local color=$NC
            case "$state" in
                running) color=$YELLOW ;;
                complete) color=$GREEN ;;
                failed) color=$RED ;;
            esac
            printf " %-25s %b%s%b (max: %s tokens, dep: %s)\n" "$name" "$color" "$state" "$NC" "$max_tokens" "$dep"
        done
        echo "────────────────────────────────────────────────────"
        echo " Budget: $budget / $TOKEN_DAILY_LIMIT tokens remaining"
        echo " Peak hours: $PEAK_HOURS_START:00 - $PEAK_HOURS_END:00"
        exit 0
    fi

    # Find and start the highest-priority incomplete pipeline
    local started=0
    for entry in "${PIPELINES[@]}"; do
        IFS='|' read -r name script max_tokens dep <<< "$entry"

        # Skip if already running or complete
        if [[ "$(is_pipeline_running $name)" == "true" ]]; then
            log "SKIP $name: already running"
            continue
        fi
        if [[ "$(is_pipeline_complete $name)" == "true" ]]; then
            log "SKIP $name: already complete"
            continue
        fi

        # Check dependency
        if ! check_dependency "$dep"; then
            log "SKIP $name: dependency $dep not met"
            continue
        fi

        # Try to start
        if start_pipeline "$name" "$script" "$max_tokens" "$budget" "$mode"; then
            started=1
            # Only start one pipeline per run (let it claim tokens before next check)
            # Exception: playground-factory and training-factory can run in parallel
            if [[ "$name" != "playground-factory" && "$name" != "training-factory" ]]; then
                break
            fi
        fi
    done

    if [[ $started -eq 0 ]]; then
        log "No pipelines to start (all complete, running, or blocked)."
    fi

    log "=== Pipeline Scheduler done ==="
}
|
||||
|
||||
# Script entry point: forward all CLI arguments to main.
main "$@"
|
||||
@@ -1,4 +1,3 @@
|
||||
#!/usr/bin/env python3
|
||||
import json
|
||||
from hermes_tools import browser_navigate, browser_vision
|
||||
|
||||
|
||||
@@ -19,11 +19,9 @@ from glitch_patterns import (
|
||||
GlitchPattern,
|
||||
GlitchSeverity,
|
||||
MATRIX_GLITCH_PATTERNS,
|
||||
THREEJS_CATEGORIES,
|
||||
build_vision_prompt,
|
||||
get_pattern_by_category,
|
||||
get_patterns_by_severity,
|
||||
get_threejs_patterns,
|
||||
)
|
||||
|
||||
from matrix_glitch_detector import (
|
||||
@@ -42,7 +40,7 @@ class TestGlitchPatterns(unittest.TestCase):
|
||||
|
||||
def test_pattern_count(self):
|
||||
"""Verify we have a reasonable number of defined patterns."""
|
||||
self.assertGreaterEqual(len(MATRIX_GLITCH_PATTERNS), 14) # 10 generic + 6 Three.js
|
||||
self.assertGreaterEqual(len(MATRIX_GLITCH_PATTERNS), 8)
|
||||
|
||||
def test_all_patterns_have_required_fields(self):
|
||||
"""Every pattern must have category, name, description, severity, prompts."""
|
||||
@@ -90,9 +88,6 @@ class TestGlitchPatterns(unittest.TestCase):
|
||||
self.assertIn("Floating Object", prompt)
|
||||
self.assertIn("Z-Fighting", prompt)
|
||||
self.assertIn("Missing", prompt)
|
||||
# Three.js patterns should be included
|
||||
self.assertIn("Shader Compilation Failure", prompt)
|
||||
self.assertIn("Bloom Overflow", prompt)
|
||||
|
||||
def test_build_vision_prompt_subset(self):
|
||||
"""Vision prompt with subset should only include specified patterns."""
|
||||
@@ -253,7 +248,7 @@ class TestGlitchDetector(unittest.TestCase):
|
||||
|
||||
try:
|
||||
report = run_demo(output_path)
|
||||
self.assertEqual(len(report.glitches), 6) # 4 original + 2 Three.js
|
||||
self.assertEqual(len(report.glitches), 4)
|
||||
self.assertGreater(report.summary["total_glitches"], 0)
|
||||
self.assertTrue(output_path.exists())
|
||||
|
||||
@@ -265,93 +260,6 @@ class TestGlitchDetector(unittest.TestCase):
|
||||
output_path.unlink(missing_ok=True)
|
||||
|
||||
|
||||
class TestThreeJsPatterns(unittest.TestCase):
|
||||
"""Tests for Three.js-specific glitch patterns (timmy-config#543)."""
|
||||
|
||||
def test_get_threejs_patterns_returns_only_threejs(self):
|
||||
"""get_threejs_patterns() should return only Three.js categories."""
|
||||
patterns = get_threejs_patterns()
|
||||
self.assertEqual(len(patterns), 6)
|
||||
for p in patterns:
|
||||
self.assertIn(p.category, THREEJS_CATEGORIES)
|
||||
|
||||
def test_threejs_patterns_have_required_fields(self):
|
||||
"""All Three.js patterns must have valid fields."""
|
||||
for p in get_threejs_patterns():
|
||||
self.assertIsInstance(p.category, GlitchCategory)
|
||||
self.assertTrue(p.name)
|
||||
self.assertTrue(p.description)
|
||||
self.assertIsInstance(p.severity, GlitchSeverity)
|
||||
self.assertGreater(len(p.detection_prompts), 0)
|
||||
self.assertGreater(len(p.visual_indicators), 0)
|
||||
|
||||
def test_shader_failure_is_critical(self):
|
||||
"""Shader compilation failure should be CRITICAL severity."""
|
||||
p = get_pattern_by_category(GlitchCategory.SHADER_FAILURE)
|
||||
self.assertIsNotNone(p)
|
||||
self.assertEqual(p.severity, GlitchSeverity.CRITICAL)
|
||||
|
||||
def test_texture_placeholder_is_critical(self):
|
||||
"""Texture placeholder (1x1 white) should be CRITICAL severity."""
|
||||
p = get_pattern_by_category(GlitchCategory.TEXTURE_PLACEHOLDER)
|
||||
self.assertIsNotNone(p)
|
||||
self.assertEqual(p.severity, GlitchSeverity.CRITICAL)
|
||||
|
||||
def test_infer_severity_shader_failure(self):
|
||||
"""Shader failure should infer critical/high."""
|
||||
self.assertEqual(_infer_severity("shader_failure", 0.8), "critical")
|
||||
self.assertEqual(_infer_severity("shader_failure", 0.5), "high")
|
||||
|
||||
def test_infer_severity_texture_placeholder(self):
|
||||
"""Texture placeholder should infer critical/high."""
|
||||
self.assertEqual(_infer_severity("texture_placeholder", 0.8), "critical")
|
||||
self.assertEqual(_infer_severity("texture_placeholder", 0.5), "high")
|
||||
|
||||
def test_infer_severity_uv_mapping(self):
|
||||
"""UV mapping error should infer high/medium."""
|
||||
self.assertEqual(_infer_severity("uv_mapping_error", 0.8), "high")
|
||||
self.assertEqual(_infer_severity("uv_mapping_error", 0.5), "medium")
|
||||
|
||||
def test_infer_severity_frustum_culling(self):
|
||||
"""Frustum culling should infer medium/low."""
|
||||
self.assertEqual(_infer_severity("frustum_culling", 0.7), "medium")
|
||||
self.assertEqual(_infer_severity("frustum_culling", 0.4), "low")
|
||||
|
||||
def test_infer_severity_shadow_map(self):
|
||||
"""Shadow map artifact should infer medium/low."""
|
||||
self.assertEqual(_infer_severity("shadow_map_artifact", 0.7), "medium")
|
||||
self.assertEqual(_infer_severity("shadow_map_artifact", 0.4), "low")
|
||||
|
||||
def test_infer_severity_bloom_overflow(self):
|
||||
"""Bloom overflow should infer medium/low (default path)."""
|
||||
self.assertEqual(_infer_severity("bloom_overflow", 0.7), "medium")
|
||||
self.assertEqual(_infer_severity("bloom_overflow", 0.4), "low")
|
||||
|
||||
def test_threejs_patterns_in_vision_prompt(self):
|
||||
"""Three.js patterns should appear in the composite vision prompt."""
|
||||
prompt = build_vision_prompt()
|
||||
self.assertIn("shader_failure", prompt)
|
||||
self.assertIn("texture_placeholder", prompt)
|
||||
self.assertIn("uv_mapping_error", prompt)
|
||||
self.assertIn("frustum_culling", prompt)
|
||||
self.assertIn("shadow_map_artifact", prompt)
|
||||
self.assertIn("bloom_overflow", prompt)
|
||||
|
||||
def test_threejs_subset_prompt(self):
|
||||
"""Building prompt from Three.js-only patterns should work."""
|
||||
threejs = get_threejs_patterns()
|
||||
prompt = build_vision_prompt(threejs)
|
||||
self.assertIn("Shader Compilation Failure", prompt)
|
||||
self.assertNotIn("Floating Object", prompt) # generic, not Three.js
|
||||
|
||||
def test_report_metadata_version(self):
|
||||
"""Report metadata should reference both issues."""
|
||||
report = run_demo()
|
||||
self.assertEqual(report.metadata["detector_version"], "0.2.0")
|
||||
self.assertIn("543", report.metadata["reference"])
|
||||
|
||||
|
||||
|
||||
class TestIntegration(unittest.TestCase):
|
||||
"""Integration-level tests."""
|
||||
|
||||
@@ -368,13 +276,6 @@ class TestIntegration(unittest.TestCase):
|
||||
expected = {"floating_assets", "z_fighting", "missing_textures", "clipping", "broken_normals"}
|
||||
self.assertTrue(expected.issubset(category_values))
|
||||
|
||||
def test_patterns_cover_threejs_themes(self):
|
||||
"""Patterns should cover Three.js-specific glitch themes (#543)."""
|
||||
category_values = {p.category.value for p in MATRIX_GLITCH_PATTERNS}
|
||||
threejs_expected = {"shader_failure", "texture_placeholder", "uv_mapping_error",
|
||||
"frustum_culling", "shadow_map_artifact", "bloom_overflow"}
|
||||
self.assertTrue(threejs_expected.issubset(category_values))
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
unittest.main()
|
||||
|
||||
100
training-data/scene-descriptions-metal.jsonl
Normal file
100
training-data/scene-descriptions-metal.jsonl
Normal file
@@ -0,0 +1,100 @@
|
||||
{"song": "Iron Tempest", "artist": "Ember Forge", "mood_arc": "building rage → explosive release", "beat": 1, "timestamp": "0:00", "duration_seconds": 15, "lyric_line": "The furnace breathes behind the veil of night", "scene": {"mood": "anticipation", "colors": ["charcoal", "ember orange"], "composition": "extreme wide shot", "camera_movement": "static", "description": "anticipation scene: The furnace breathes behind the veil of night — extreme wide shot with charcoal, ember orange palette, static camera."}}
|
||||
{"song": "Iron Tempest", "artist": "Ember Forge", "mood_arc": "building rage → explosive release", "beat": 2, "timestamp": "0:15", "duration_seconds": 12, "lyric_line": "Fingers curl around the hammer's weight", "scene": {"mood": "tension", "colors": ["deep red", "black"], "composition": "close-up on hands", "camera_movement": "handheld shake", "description": "tension scene: Fingers curl around the hammer's weight — close-up on hands with deep red, black palette, handheld shake camera."}}
|
||||
{"song": "Iron Tempest", "artist": "Ember Forge", "mood_arc": "building rage → explosive release", "beat": 3, "timestamp": "0:27", "duration_seconds": 18, "lyric_line": "STRIKE! The anvil screams its sacred name", "scene": {"mood": "explosion", "colors": ["molten gold", "white sparks"], "composition": "low angle hero shot", "camera_movement": "rapid zoom in", "description": "explosion scene: STRIKE! The anvil screams its sacred name — low angle hero shot with molten gold, white sparks palette, rapid zoom in camera."}}
|
||||
{"song": "Iron Tempest", "artist": "Ember Forge", "mood_arc": "building rage → explosive release", "beat": 4, "timestamp": "0:45", "duration_seconds": 14, "lyric_line": "Forged in fire, tempered by the storm", "scene": {"mood": "power", "colors": ["burning orange", "steel gray"], "composition": "symmetrical frame", "camera_movement": "tracking left", "description": "power scene: Forged in fire, tempered by the storm — symmetrical frame with burning orange, steel gray palette, tracking left camera."}}
|
||||
{"song": "Iron Tempest", "artist": "Ember Forge", "mood_arc": "building rage → explosive release", "beat": 5, "timestamp": "0:59", "duration_seconds": 16, "lyric_line": "Every wound becomes a weapon now", "scene": {"mood": "fury", "colors": ["crimson", "void black"], "composition": "dutch angle", "camera_movement": "orbit 360", "description": "fury scene: Every wound becomes a weapon now — dutch angle with crimson, void black palette, orbit 360 camera."}}
|
||||
{"song": "Iron Tempest", "artist": "Ember Forge", "mood_arc": "building rage → explosive release", "beat": 6, "timestamp": "1:15", "duration_seconds": 13, "lyric_line": "They said the flame would die — they lied", "scene": {"mood": "defiance", "colors": ["blood red", "ash white"], "composition": "over-the-shoulder", "camera_movement": "slow push in", "description": "defiance scene: They said the flame would die — they lied — over-the-shoulder with blood red, ash white palette, slow push in camera."}}
|
||||
{"song": "Iron Tempest", "artist": "Ember Forge", "mood_arc": "building rage → explosive release", "beat": 7, "timestamp": "1:28", "duration_seconds": 20, "lyric_line": "Rise from the slag, rise from the rust", "scene": {"mood": "catharsis", "colors": ["golden blaze", "deep purple"], "composition": "bird's eye view", "camera_movement": "crane shot rising", "description": "catharsis scene: Rise from the slag, rise from the rust — bird's eye view with golden blaze, deep purple palette, crane shot rising camera."}}
|
||||
{"song": "Iron Tempest", "artist": "Ember Forge", "mood_arc": "building rage → explosive release", "beat": 8, "timestamp": "1:48", "duration_seconds": 15, "lyric_line": "The iron crown weighs nothing on the brave", "scene": {"mood": "triumph", "colors": ["burnished gold", "midnight blue"], "composition": "wide landscape", "camera_movement": "steady pan right", "description": "triumph scene: The iron crown weighs nothing on the brave — wide landscape with burnished gold, midnight blue palette, steady pan right camera."}}
|
||||
{"song": "Iron Tempest", "artist": "Ember Forge", "mood_arc": "building rage → explosive release", "beat": 9, "timestamp": "2:03", "duration_seconds": 17, "lyric_line": "Smoke curls where the battle used to be", "scene": {"mood": "aftermath", "colors": ["smoke gray", "dying ember"], "composition": "medium shot", "camera_movement": "slow drift", "description": "aftermath scene: Smoke curls where the battle used to be — medium shot with smoke gray, dying ember palette, slow drift camera."}}
|
||||
{"song": "Iron Tempest", "artist": "Ember Forge", "mood_arc": "building rage → explosive release", "beat": 10, "timestamp": "2:20", "duration_seconds": 12, "lyric_line": "Tomorrow we forge again", "scene": {"mood": "resolve", "colors": ["cool steel", "dawn blue"], "composition": "profile close-up", "camera_movement": "fade to white", "description": "resolve scene: Tomorrow we forge again — profile close-up with cool steel, dawn blue palette, fade to white camera."}}
|
||||
{"song": "Void Sermon", "artist": "Abyssal Tongue", "mood_arc": "dread → nihilistic transcendence", "beat": 1, "timestamp": "0:00", "duration_seconds": 20, "lyric_line": "In the absence of light, absence speaks", "scene": {"mood": "dread", "colors": ["void black", "sick green"], "composition": "center frame void", "camera_movement": "very slow zoom", "description": "dread scene: In the absence of light, absence speaks — center frame void with void black, sick green palette, very slow zoom camera."}}
|
||||
{"song": "Void Sermon", "artist": "Abyssal Tongue", "mood_arc": "dread → nihilistic transcendence", "beat": 2, "timestamp": "0:20", "duration_seconds": 15, "lyric_line": "Every prayer dissolves before it lands", "scene": {"mood": "despair", "colors": ["deep indigo", "bruise purple"], "composition": "tunnel vision vignette", "camera_movement": "dolly back", "description": "despair scene: Every prayer dissolves before it lands — tunnel vision vignette with deep indigo, bruise purple palette, dolly back camera."}}
|
||||
{"song": "Void Sermon", "artist": "Abyssal Tongue", "mood_arc": "dread → nihilistic transcendence", "beat": 3, "timestamp": "0:35", "duration_seconds": 18, "lyric_line": "The preacher's tongue is made of crawling things", "scene": {"mood": "madness", "colors": ["flickering neon", "decay green"], "composition": "fractured mirror", "camera_movement": "tilt-shift chaos", "description": "madness scene: The preacher's tongue is made of crawling things — fractured mirror with flickering neon, decay green palette, tilt-shift chaos camera."}}
|
||||
{"song": "Void Sermon", "artist": "Abyssal Tongue", "mood_arc": "dread → nihilistic transcendence", "beat": 4, "timestamp": "0:53", "duration_seconds": 14, "lyric_line": "Watch it swallow — watch yourself go down", "scene": {"mood": "horror", "colors": ["arterial red", "bone white"], "composition": "extreme close-up eyes", "camera_movement": "iris open", "description": "horror scene: Watch it swallow — watch yourself go down — extreme close-up eyes with arterial red, bone white palette, iris open camera."}}
|
||||
{"song": "Void Sermon", "artist": "Abyssal Tongue", "mood_arc": "dread → nihilistic transcendence", "beat": 5, "timestamp": "1:07", "duration_seconds": 16, "lyric_line": "On broken knees before the nameless thing", "scene": {"mood": "submission", "colors": ["muddy brown", "gray fog"], "composition": "low angle kneeling", "camera_movement": "descending crane", "description": "submission scene: On broken knees before the nameless thing — low angle kneeling with muddy brown, gray fog palette, descending crane camera."}}
|
||||
{"song": "Void Sermon", "artist": "Abyssal Tongue", "mood_arc": "dread → nihilistic transcendence", "beat": 6, "timestamp": "1:23", "duration_seconds": 19, "lyric_line": "It was always inside — the sermon was yours", "scene": {"mood": "revelation", "colors": ["anti-light white", "void purple"], "composition": "radial symmetry", "camera_movement": "spiral inward", "description": "revelation scene: It was always inside — the sermon was yours — radial symmetry with anti-light white, void purple palette, spiral inward camera."}}
|
||||
{"song": "Void Sermon", "artist": "Abyssal Tongue", "mood_arc": "dread → nihilistic transcendence", "beat": 7, "timestamp": "1:42", "duration_seconds": 15, "lyric_line": "To be nothing is to be free", "scene": {"mood": "transcendence", "colors": ["absolute black", "single gold thread"], "composition": "negative space dominant", "camera_movement": "frozen frame", "description": "transcendence scene: To be nothing is to be free — negative space dominant with absolute black, single gold thread palette, frozen frame camera."}}
|
||||
{"song": "Void Sermon", "artist": "Abyssal Tongue", "mood_arc": "dread → nihilistic transcendence", "beat": 8, "timestamp": "1:57", "duration_seconds": 17, "lyric_line": "SCREAM into the void and void screams back", "scene": {"mood": "ecstasy", "colors": ["inverted colors", "strobe white"], "composition": "chaotic overlay", "camera_movement": "rapid cuts", "description": "ecstasy scene: SCREAM into the void and void screams back — chaotic overlay with inverted colors, strobe white palette, rapid cuts camera."}}
|
||||
{"song": "Void Sermon", "artist": "Abyssal Tongue", "mood_arc": "dread → nihilistic transcendence", "beat": 9, "timestamp": "2:14", "duration_seconds": 13, "lyric_line": "Silence after the sermon ends", "scene": {"mood": "calm", "colors": ["deep ocean blue", "silver mist"], "composition": "wide horizon", "camera_movement": "slow pan", "description": "calm scene: Silence after the sermon ends — wide horizon with deep ocean blue, silver mist palette, slow pan camera."}}
|
||||
{"song": "Void Sermon", "artist": "Abyssal Tongue", "mood_arc": "dread → nihilistic transcendence", "beat": 10, "timestamp": "2:27", "duration_seconds": 11, "lyric_line": "...", "scene": {"mood": "emptiness", "colors": ["flat gray", "nothing"], "composition": "empty frame", "camera_movement": "static long hold", "description": "emptiness scene: ... — empty frame with flat gray, nothing palette, static long hold camera."}}
|
||||
{"song": "Rust and Ruin", "artist": "Corrosion Saints", "mood_arc": "nostalgia → bitter acceptance", "beat": 1, "timestamp": "0:00", "duration_seconds": 14, "lyric_line": "Remember when the garden grew on faith alone", "scene": {"mood": "nostalgia", "colors": ["faded sepia", "warm amber"], "composition": "window light portrait", "camera_movement": "soft focus rack", "description": "nostalgia scene: Remember when the garden grew on faith alone — window light portrait with faded sepia, warm amber palette, soft focus rack camera."}}
|
||||
{"song": "Rust and Ruin", "artist": "Corrosion Saints", "mood_arc": "nostalgia → bitter acceptance", "beat": 2, "timestamp": "0:14", "duration_seconds": 16, "lyric_line": "Every wall we built has learned to fall", "scene": {"mood": "loss", "colors": ["autumn brown", "pale gold"], "composition": "abandoned hallway", "camera_movement": "slow tracking shot", "description": "loss scene: Every wall we built has learned to fall — abandoned hallway with autumn brown, pale gold palette, slow tracking shot camera."}}
|
||||
{"song": "Rust and Ruin", "artist": "Corrosion Saints", "mood_arc": "nostalgia → bitter acceptance", "beat": 3, "timestamp": "0:30", "duration_seconds": 15, "lyric_line": "Corrosion is just patience with teeth", "scene": {"mood": "anger", "colors": ["rust red", "industrial gray"], "composition": "macro rust detail", "camera_movement": "shaky zoom", "description": "anger scene: Corrosion is just patience with teeth — macro rust detail with rust red, industrial gray palette, shaky zoom camera."}}
|
||||
{"song": "Rust and Ruin", "artist": "Corrosion Saints", "mood_arc": "nostalgia → bitter acceptance", "beat": 4, "timestamp": "0:45", "duration_seconds": 18, "lyric_line": "The last good year is rusting in the yard", "scene": {"mood": "grief", "colors": ["rain-streaked glass", "muted blue"], "composition": "reflection in puddle", "camera_movement": "tilt down", "description": "grief scene: The last good year is rusting in the yard — reflection in puddle with rain-streaked glass, muted blue palette, tilt down camera."}}
|
||||
{"song": "Rust and Ruin", "artist": "Corrosion Saints", "mood_arc": "nostalgia → bitter acceptance", "beat": 5, "timestamp": "1:03", "duration_seconds": 14, "lyric_line": "They called it progress — I call it ruin", "scene": {"mood": "bitterness", "colors": ["acid green", "dark bronze"], "composition": "diagonal frame", "camera_movement": "dutch roll", "description": "bitterness scene: They called it progress — I call it ruin — diagonal frame with acid green, dark bronze palette, dutch roll camera."}}
|
||||
{"song": "Rust and Ruin", "artist": "Corrosion Saints", "mood_arc": "nostalgia → bitter acceptance", "beat": 6, "timestamp": "1:17", "duration_seconds": 17, "lyric_line": "But rust remembers what the steel forgot", "scene": {"mood": "defiance", "colors": ["bright rust", "shadow black"], "composition": "silhouette against fire", "camera_movement": "backlight flare", "description": "defiance scene: But rust remembers what the steel forgot — silhouette against fire with bright rust, shadow black palette, backlight flare camera."}}
|
||||
{"song": "Rust and Ruin", "artist": "Corrosion Saints", "mood_arc": "nostalgia → bitter acceptance", "beat": 7, "timestamp": "1:34", "duration_seconds": 15, "lyric_line": "We are the saints of what was lost", "scene": {"mood": "acceptance", "colors": ["soft copper", "evening blue"], "composition": "two-shot medium", "camera_movement": "gentle push", "description": "acceptance scene: We are the saints of what was lost — two-shot medium with soft copper, evening blue palette, gentle push camera."}}
|
||||
{"song": "Rust and Ruin", "artist": "Corrosion Saints", "mood_arc": "nostalgia → bitter acceptance", "beat": 8, "timestamp": "1:49", "duration_seconds": 16, "lyric_line": "The factory sleeps but never dreams", "scene": {"mood": "resignation", "colors": ["overcast gray", "muted earth"], "composition": "long shot landscape", "camera_movement": "steady wide", "description": "resignation scene: The factory sleeps but never dreams — long shot landscape with overcast gray, muted earth palette, steady wide camera."}}
|
||||
{"song": "Rust and Ruin", "artist": "Corrosion Saints", "mood_arc": "nostalgia → bitter acceptance", "beat": 9, "timestamp": "2:05", "duration_seconds": 14, "lyric_line": "There is beauty in the break", "scene": {"mood": "peace", "colors": ["dusk purple", "candlelight"], "composition": "still life", "camera_movement": "locked off", "description": "peace scene: There is beauty in the break — still life with dusk purple, candlelight palette, locked off camera."}}
|
||||
{"song": "Rust and Ruin", "artist": "Corrosion Saints", "mood_arc": "nostalgia → bitter acceptance", "beat": 10, "timestamp": "2:19", "duration_seconds": 13, "lyric_line": "Rust and ruin. Amen.", "scene": {"mood": "finality", "colors": ["monochrome rust", "white"], "composition": "fade to single object", "camera_movement": "slow dissolve", "description": "finality scene: Rust and ruin. Amen. — fade to single object with monochrome rust, white palette, slow dissolve camera."}}
|
||||
{"song": "Neon Crucifixion", "artist": "Digital Vespers", "mood_arc": "cyberpunk agony → digital resurrection", "beat": 1, "timestamp": "0:00", "duration_seconds": 16, "lyric_line": "Strip-mall cathedral, fluorescent prayer", "scene": {"mood": "oppression", "colors": ["neon magenta", "concrete gray"], "composition": "blade runner alley", "camera_movement": "low angle crane", "description": "oppression scene: Strip-mall cathedral, fluorescent prayer — blade runner alley with neon magenta, concrete gray palette, low angle crane camera."}}
|
||||
{"song": "Neon Crucifixion", "artist": "Digital Vespers", "mood_arc": "cyberpunk agony → digital resurrection", "beat": 2, "timestamp": "0:16", "duration_seconds": 14, "lyric_line": "They uploaded god and god crashed hard", "scene": {"mood": "pain", "colors": ["electric blue", "blood pink"], "composition": "wires like veins", "camera_movement": "macro circuit board", "description": "pain scene: They uploaded god and god crashed hard — wires like veins with electric blue, blood pink palette, macro circuit board camera."}}
|
||||
{"song": "Neon Crucifixion", "artist": "Digital Vespers", "mood_arc": "cyberpunk agony → digital resurrection", "beat": 3, "timestamp": "0:30", "duration_seconds": 18, "lyric_line": "Nailed to the algorithm — no salvation in the code", "scene": {"mood": "agony", "colors": ["hot white", "chrome silver"], "composition": "cruciform pose", "camera_movement": "orbit slow", "description": "agony scene: Nailed to the algorithm — no salvation in the code — cruciform pose with hot white, chrome silver palette, orbit slow camera."}}
|
||||
{"song": "Neon Crucifixion", "artist": "Digital Vespers", "mood_arc": "cyberpunk agony → digital resurrection", "beat": 4, "timestamp": "0:48", "duration_seconds": 15, "lyric_line": "Error 404: soul not found", "scene": {"mood": "despair", "colors": ["deep cyan", "static noise"], "composition": "glitch frame", "camera_movement": "data-moshing", "description": "despair scene: Error 404: soul not found — glitch frame with deep cyan, static noise palette, data-moshing camera."}}
|
||||
{"song": "Neon Crucifixion", "artist": "Digital Vespers", "mood_arc": "cyberpunk agony → digital resurrection", "beat": 5, "timestamp": "1:03", "duration_seconds": 17, "lyric_line": "Break the crossbar — pull the nails from RAM", "scene": {"mood": "rebellion", "colors": ["neon red", "black void"], "composition": "rising figure", "camera_movement": "vertical pan up", "description": "rebellion scene: Break the crossbar — pull the nails from RAM — rising figure with neon red, black void palette, vertical pan up camera."}}
|
||||
{"song": "Neon Crucifixion", "artist": "Digital Vespers", "mood_arc": "cyberpunk agony → digital resurrection", "beat": 6, "timestamp": "1:20", "duration_seconds": 16, "lyric_line": "In the crash log I found my name", "scene": {"mood": "awakening", "colors": ["gold circuitry", "deep purple"], "composition": "eye extreme close-up", "camera_movement": "reflection reveal", "description": "awakening scene: In the crash log I found my name — eye extreme close-up with gold circuitry, deep purple palette, reflection reveal camera."}}
|
||||
{"song": "Neon Crucifixion", "artist": "Digital Vespers", "mood_arc": "cyberpunk agony → digital resurrection", "beat": 7, "timestamp": "1:36", "duration_seconds": 15, "lyric_line": "Resurrected by the error handler", "scene": {"mood": "transcendence", "colors": ["pure white light", "rainbow prism"], "composition": "ascending through ceiling", "camera_movement": "vertical dolly up", "description": "transcendence scene: Resurrected by the error handler — ascending through ceiling with pure white light, rainbow prism palette, vertical dolly up camera."}}
|
||||
{"song": "Neon Crucifixion", "artist": "Digital Vespers", "mood_arc": "cyberpunk agony → digital resurrection", "beat": 8, "timestamp": "1:51", "duration_seconds": 18, "lyric_line": "I am the bug they cannot patch", "scene": {"mood": "power", "colors": ["lightning white", "neon halo"], "composition": "figure dominates frame", "camera_movement": "wide establishing", "description": "power scene: I am the bug they cannot patch — figure dominates frame with lightning white, neon halo palette, wide establishing camera."}}
|
||||
{"song": "Neon Crucifixion", "artist": "Digital Vespers", "mood_arc": "cyberpunk agony → digital resurrection", "beat": 9, "timestamp": "2:09", "duration_seconds": 14, "lyric_line": "Neon crucifixion — digital amen", "scene": {"mood": "defiance", "colors": ["red neon", "chrome"], "composition": "fist raised to sky", "camera_movement": "hero angle", "description": "defiance scene: Neon crucifixion — digital amen — fist raised to sky with red neon, chrome palette, hero angle camera."}}
|
||||
{"song": "Neon Crucifixion", "artist": "Digital Vespers", "mood_arc": "cyberpunk agony → digital resurrection", "beat": 10, "timestamp": "2:23", "duration_seconds": 12, "lyric_line": "The screen goes dark. The signal remains.", "scene": {"mood": "peace", "colors": ["soft blue glow", "warm white"], "composition": "figure walks away", "camera_movement": "long hold wide", "description": "peace scene: The screen goes dark. The signal remains. — figure walks away with soft blue glow, warm white palette, long hold wide camera."}}
|
||||
{"song": "Graveyard Shift", "artist": "Night Crew", "mood_arc": "exhaustion → desperate energy", "beat": 1, "timestamp": "0:00", "duration_seconds": 15, "lyric_line": "Three AM and the machines don't care", "scene": {"mood": "exhaustion", "colors": ["fluorescent white", "tired yellow"], "composition": "overhead factory floor", "camera_movement": "static drone shot", "description": "exhaustion scene: Three AM and the machines don't care — overhead factory floor with fluorescent white, tired yellow palette, static drone shot camera."}}
|
||||
{"song": "Graveyard Shift", "artist": "Night Crew", "mood_arc": "exhaustion → desperate energy", "beat": 2, "timestamp": "0:15", "duration_seconds": 14, "lyric_line": "Same hands, same parts, same empty stare", "scene": {"mood": "numbness", "colors": ["sodium orange", "shadow gray"], "composition": "repeating worker figures", "camera_movement": "slow lateral pan", "description": "numbness scene: Same hands, same parts, same empty stare — repeating worker figures with sodium orange, shadow gray palette, slow lateral pan camera."}}
|
||||
{"song": "Graveyard Shift", "artist": "Night Crew", "mood_arc": "exhaustion → desperate energy", "beat": 3, "timestamp": "0:29", "duration_seconds": 16, "lyric_line": "The clock is a liar — time doesn't move", "scene": {"mood": "resentment", "colors": ["dirty green", "stained concrete"], "composition": "clock close-up", "camera_movement": "time-lapse blur", "description": "resentment scene: The clock is a liar — time doesn't move — clock close-up with dirty green, stained concrete palette, time-lapse blur camera."}}
|
||||
{"song": "Graveyard Shift", "artist": "Night Crew", "mood_arc": "exhaustion → desperate energy", "beat": 4, "timestamp": "0:45", "duration_seconds": 17, "lyric_line": "GRIND! The metal screams what we cannot say", "scene": {"mood": "fury", "colors": ["sparks white", "oil black"], "composition": "machine POV", "camera_movement": "violent shake", "description": "fury scene: GRIND! The metal screams what we cannot say — machine POV with sparks white, oil black palette, violent shake camera."}}
|
||||
{"song": "Graveyard Shift", "artist": "Night Crew", "mood_arc": "exhaustion → desperate energy", "beat": 5, "timestamp": "1:02", "duration_seconds": 14, "lyric_line": "Who buries the gravedigger when he dies", "scene": {"mood": "desperation", "colors": ["cold blue", "flickering light"], "composition": "face reflected in metal", "camera_movement": "rack focus", "description": "desperation scene: Who buries the gravedigger when he dies — face reflected in metal with cold blue, flickering light palette, rack focus camera."}}
|
||||
{"song": "Graveyard Shift", "artist": "Night Crew", "mood_arc": "exhaustion → desperate energy", "beat": 6, "timestamp": "1:16", "duration_seconds": 18, "lyric_line": "But tonight we own the dark — we ARE the dark", "scene": {"mood": "energy", "colors": ["electric yellow", "midnight black"], "composition": "workers unite frame", "camera_movement": "rapid zoom group", "description": "energy scene: But tonight we own the dark — we ARE the dark — workers unite frame with electric yellow, midnight black palette, rapid zoom group camera."}}
|
||||
{"song": "Graveyard Shift", "artist": "Night Crew", "mood_arc": "exhaustion → desperate energy", "beat": 7, "timestamp": "1:34", "duration_seconds": 15, "lyric_line": "Brothers in the rust, sisters in the smoke", "scene": {"mood": "camaraderie", "colors": ["warm amber", "coal shadow"], "composition": "group silhouette", "camera_movement": "steadicam weave", "description": "camaraderie scene: Brothers in the rust, sisters in the smoke — group silhouette with warm amber, coal shadow palette, steadicam weave camera."}}
|
||||
{"song": "Graveyard Shift", "artist": "Night Crew", "mood_arc": "exhaustion → desperate energy", "beat": 8, "timestamp": "1:49", "duration_seconds": 16, "lyric_line": "The graveyard shift belongs to us", "scene": {"mood": "defiance", "colors": ["fire red", "steel"], "composition": "fists raised at shift end", "camera_movement": "crane up revealing", "description": "defiance scene: The graveyard shift belongs to us — fists raised at shift end with fire red, steel palette, crane up revealing camera."}}
|
||||
{"song": "Graveyard Shift", "artist": "Night Crew", "mood_arc": "exhaustion → desperate energy", "beat": 9, "timestamp": "2:05", "duration_seconds": 14, "lyric_line": "Walk home bleeding light from every pore", "scene": {"mood": "weariness", "colors": ["dawn gray", "streetlight halo"], "composition": "walking into sunrise", "camera_movement": "tracking behind", "description": "weariness scene: Walk home bleeding light from every pore — walking into sunrise with dawn gray, streetlight halo palette, tracking behind camera."}}
|
||||
{"song": "Graveyard Shift", "artist": "Night Crew", "mood_arc": "exhaustion → desperate energy", "beat": 10, "timestamp": "2:19", "duration_seconds": 13, "lyric_line": "Tomorrow the machines will need us again", "scene": {"mood": "resolve", "colors": ["first sun gold", "city silhouette"], "composition": "figure at horizon", "camera_movement": "slow dissolve", "description": "resolve scene: Tomorrow the machines will need us again — figure at horizon with first sun gold, city silhouette palette, slow dissolve camera."}}
|
||||
{"song": "Cathedral of Static", "artist": "Transmission Hymn", "mood_arc": "spiritual chaos → revelation through noise", "beat": 1, "timestamp": "0:00", "duration_seconds": 18, "lyric_line": "Tune to frequency zero — hear the nothing sing", "scene": {"mood": "chaos", "colors": ["white noise", "rainbow interference"], "composition": "overloaded signal", "camera_movement": "rapid focal shifts", "description": "chaos scene: Tune to frequency zero — hear the nothing sing — overloaded signal with white noise, rainbow interference palette, rapid focal shifts camera."}}
|
||||
{"song": "Cathedral of Static", "artist": "Transmission Hymn", "mood_arc": "spiritual chaos → revelation through noise", "beat": 2, "timestamp": "0:18", "duration_seconds": 15, "lyric_line": "The antenna is a steeple pointed at god", "scene": {"mood": "confusion", "colors": ["scanner green", "cathedral stone"], "composition": "radio tower POV", "camera_movement": "spin blur", "description": "confusion scene: The antenna is a steeple pointed at god — radio tower POV with scanner green, cathedral stone palette, spin blur camera."}}
|
||||
{"song": "Cathedral of Static", "artist": "Transmission Hymn", "mood_arc": "spiritual chaos → revelation through noise", "beat": 3, "timestamp": "0:33", "duration_seconds": 17, "lyric_line": "Every frequency is a hymn if you listen wrong", "scene": {"mood": "wonder", "colors": ["stained glass fragments", "signal blue"], "composition": "looking up nave", "camera_movement": "slow crane up", "description": "wonder scene: Every frequency is a hymn if you listen wrong — looking up nave with stained glass fragments, signal blue palette, slow crane up camera."}}
|
||||
{"song": "Cathedral of Static", "artist": "Transmission Hymn", "mood_arc": "spiritual chaos → revelation through noise", "beat": 4, "timestamp": "0:50", "duration_seconds": 14, "lyric_line": "SING! The static choir fills the void", "scene": {"mood": "ecstasy", "colors": ["overexposed white", "gold"], "composition": "choir of antennas", "camera_movement": "circular dolly", "description": "ecstasy scene: SING! The static choir fills the void — choir of antennas with overexposed white, gold palette, circular dolly camera."}}
|
||||
{"song": "Cathedral of Static", "artist": "Transmission Hymn", "mood_arc": "spiritual chaos → revelation through noise", "beat": 5, "timestamp": "1:04", "duration_seconds": 16, "lyric_line": "What comes through the signal is not meant for ears", "scene": {"mood": "terror", "colors": ["red alert", "shadow black"], "composition": "signal distortion", "camera_movement": "image tearing", "description": "terror scene: What comes through the signal is not meant for ears — signal distortion with red alert, shadow black palette, image tearing camera."}}
|
||||
{"song": "Cathedral of Static", "artist": "Transmission Hymn", "mood_arc": "spiritual chaos → revelation through noise", "beat": 6, "timestamp": "1:20", "duration_seconds": 18, "lyric_line": "The message was always in the noise between", "scene": {"mood": "revelation", "colors": ["pure frequency bands", "spectrum"], "composition": "equalizer landscape", "camera_movement": "waveform tracking", "description": "revelation scene: The message was always in the noise between — equalizer landscape with pure frequency bands, spectrum palette, waveform tracking camera."}}
|
||||
{"song": "Cathedral of Static", "artist": "Transmission Hymn", "mood_arc": "spiritual chaos → revelation through noise", "beat": 7, "timestamp": "1:38", "duration_seconds": 15, "lyric_line": "Dial it back to zero. Listen.", "scene": {"mood": "peace", "colors": ["warm analog", "tube glow"], "composition": "vintage radio close-up", "camera_movement": "macro to full", "description": "peace scene: Dial it back to zero. Listen. — vintage radio close-up with warm analog, tube glow palette, macro to full camera."}}
|
||||
{"song": "Cathedral of Static", "artist": "Transmission Hymn", "mood_arc": "spiritual chaos → revelation through noise", "beat": 8, "timestamp": "1:53", "duration_seconds": 16, "lyric_line": "I found god in the between-station hiss", "scene": {"mood": "transcendence", "colors": ["white cathedral light", "radio spectrum"], "composition": "figure in nave", "camera_movement": "slow push in face", "description": "transcendence scene: I found god in the between-station hiss — figure in nave with white cathedral light, radio spectrum palette, slow push in face camera."}}
|
||||
{"song": "Cathedral of Static", "artist": "Transmission Hymn", "mood_arc": "spiritual chaos → revelation through noise", "beat": 9, "timestamp": "2:09", "duration_seconds": 14, "lyric_line": "The cathedral of static never closes its doors", "scene": {"mood": "awe", "colors": ["golden hour", "antenna silhouette"], "composition": "vast landscape", "camera_movement": "wide pull back", "description": "awe scene: The cathedral of static never closes its doors — vast landscape with golden hour, antenna silhouette palette, wide pull back camera."}}
|
||||
{"song": "Cathedral of Static", "artist": "Transmission Hymn", "mood_arc": "spiritual chaos → revelation through noise", "beat": 10, "timestamp": "2:23", "duration_seconds": 12, "lyric_line": "...and the static says amen", "scene": {"mood": "silence", "colors": ["deep quiet blue", "single amber"], "composition": "empty chapel", "camera_movement": "static hold", "description": "silence scene: ...and the static says amen — empty chapel with deep quiet blue, single amber palette, static hold camera."}}
|
||||
{"song": "Blood Meridian Blues", "artist": "Desert Prophets", "mood_arc": "frontier violence → exhausted prophecy", "beat": 1, "timestamp": "0:00", "duration_seconds": 16, "lyric_line": "The horizon bleeds where the prophets walked", "scene": {"mood": "menace", "colors": ["dust brown", "heat haze"], "composition": "endless desert", "camera_movement": "slow dolly forward", "description": "menace scene: The horizon bleeds where the prophets walked — endless desert with dust brown, heat haze palette, slow dolly forward camera."}}
|
||||
{"song": "Blood Meridian Blues", "artist": "Desert Prophets", "mood_arc": "frontier violence → exhausted prophecy", "beat": 2, "timestamp": "0:16", "duration_seconds": 15, "lyric_line": "Every skull was someone's Sunday best", "scene": {"mood": "violence", "colors": ["arterial red", "bone white"], "composition": "aftermath wide shot", "camera_movement": "steady pan revealing", "description": "violence scene: Every skull was someone's Sunday best — aftermath wide shot with arterial red, bone white palette, steady pan revealing camera."}}
|
||||
{"song": "Blood Meridian Blues", "artist": "Desert Prophets", "mood_arc": "frontier violence → exhausted prophecy", "beat": 3, "timestamp": "0:31", "duration_seconds": 17, "lyric_line": "Draw! The desert doesn't judge the dead", "scene": {"mood": "fury", "colors": ["gunsmoke gray", "sunburnt orange"], "composition": "dual figures facing", "camera_movement": "split diopter", "description": "fury scene: Draw! The desert doesn't judge the dead — dual figures facing with gunsmoke gray, sunburnt orange palette, split diopter camera."}}
|
||||
{"song": "Blood Meridian Blues", "artist": "Desert Prophets", "mood_arc": "frontier violence → exhausted prophecy", "beat": 4, "timestamp": "0:48", "duration_seconds": 14, "lyric_line": "The judge said mercy — the judge lied", "scene": {"mood": "despair", "colors": ["blood-soaked sand", "twilight purple"], "composition": "kneeling figure", "camera_movement": "overhead crane down", "description": "despair scene: The judge said mercy — the judge lied — kneeling figure with blood-soaked sand, twilight purple palette, overhead crane down camera."}}
|
||||
{"song": "Blood Meridian Blues", "artist": "Desert Prophets", "mood_arc": "frontier violence → exhausted prophecy", "beat": 5, "timestamp": "1:02", "duration_seconds": 16, "lyric_line": "Write it in the dirt — the wind will read", "scene": {"mood": "resignation", "colors": ["parchment yellow", "ink black"], "composition": "handwriting close-up", "camera_movement": "tracking text", "description": "resignation scene: Write it in the dirt — the wind will read — handwriting close-up with parchment yellow, ink black palette, tracking text camera."}}
|
||||
{"song": "Blood Meridian Blues", "artist": "Desert Prophets", "mood_arc": "frontier violence → exhausted prophecy", "beat": 6, "timestamp": "1:18", "duration_seconds": 18, "lyric_line": "The meridian runs through the wound — follow it", "scene": {"mood": "prophecy", "colors": ["lightning white", "storm purple"], "composition": "prophet silhouette against storm", "camera_movement": "dramatic backlight", "description": "prophecy scene: The meridian runs through the wound — follow it — prophet silhouette against storm with lightning white, storm purple palette, dramatic backlight camera."}}
|
||||
{"song": "Blood Meridian Blues", "artist": "Desert Prophets", "mood_arc": "frontier violence → exhausted prophecy", "beat": 7, "timestamp": "1:36", "duration_seconds": 15, "lyric_line": "Every prophecy costs a pint of blood", "scene": {"mood": "exhaustion", "colors": ["dried blood brown", "dusk gold"], "composition": "figure collapses", "camera_movement": "slow fall with subject", "description": "exhaustion scene: Every prophecy costs a pint of blood — figure collapses with dried blood brown, dusk gold palette, slow fall with subject camera."}}
|
||||
{"song": "Blood Meridian Blues", "artist": "Desert Prophets", "mood_arc": "frontier violence → exhausted prophecy", "beat": 8, "timestamp": "1:51", "duration_seconds": 16, "lyric_line": "The frontier was always inside us", "scene": {"mood": "bitter truth", "colors": ["moonlit silver", "dark earth"], "composition": "grave marker", "camera_movement": "slow zoom reveal", "description": "bitter truth scene: The frontier was always inside us — grave marker with moonlit silver, dark earth palette, slow zoom reveal camera."}}
|
||||
{"song": "Blood Meridian Blues", "artist": "Desert Prophets", "mood_arc": "frontier violence → exhausted prophecy", "beat": 9, "timestamp": "2:07", "duration_seconds": 14, "lyric_line": "Rest now, prophet. The desert remembers.", "scene": {"mood": "peace", "colors": ["dawn rose", "quiet sand"], "composition": "sunrise landscape", "camera_movement": "steady wide", "description": "peace scene: Rest now, prophet. The desert remembers. — sunrise landscape with dawn rose, quiet sand palette, steady wide camera."}}
|
||||
{"song": "Blood Meridian Blues", "artist": "Desert Prophets", "mood_arc": "frontier violence → exhausted prophecy", "beat": 10, "timestamp": "2:21", "duration_seconds": 13, "lyric_line": "The meridian has no end", "scene": {"mood": "eternity", "colors": ["endless tan", "pale sky"], "composition": "vanishing point", "camera_movement": "infinite zoom out", "description": "eternity scene: The meridian has no end — vanishing point with endless tan, pale sky palette, infinite zoom out camera."}}
|
||||
{"song": "Iron Maiden's Lullaby", "artist": "Lullaby Massacre", "mood_arc": "false comfort → brutal awakening", "beat": 1, "timestamp": "0:00", "duration_seconds": 15, "lyric_line": "Close your eyes, little one, the cage is warm", "scene": {"mood": "false comfort", "colors": ["nursery pastels", "soft pink"], "composition": "music box close-up", "camera_movement": "gentle macro", "description": "false comfort scene: Close your eyes, little one, the cage is warm — music box close-up with nursery pastels, soft pink palette, gentle macro camera."}}
|
||||
{"song": "Iron Maiden's Lullaby", "artist": "Lullaby Massacre", "mood_arc": "false comfort → brutal awakening", "beat": 2, "timestamp": "0:15", "duration_seconds": 14, "lyric_line": "The lullaby has teeth behind its smile", "scene": {"mood": "unease", "colors": ["sickly sweet", "underlying rust"], "composition": "cracked doll face", "camera_movement": "slow rack to crack", "description": "unease scene: The lullaby has teeth behind its smile — cracked doll face with sickly sweet, underlying rust palette, slow rack to crack camera."}}
|
||||
{"song": "Iron Maiden's Lullaby", "artist": "Lullaby Massacre", "mood_arc": "false comfort → brutal awakening", "beat": 3, "timestamp": "0:29", "duration_seconds": 17, "lyric_line": "SING! The maiden opens her arms for you", "scene": {"mood": "horror", "colors": ["blood red spikes", "iron gray"], "composition": "iron maiden interior", "camera_movement": "POV closing in", "description": "horror scene: SING! The maiden opens her arms for you — iron maiden interior with blood red spikes, iron gray palette, POV closing in camera."}}
|
||||
{"song": "Iron Maiden's Lullaby", "artist": "Lullaby Massacre", "mood_arc": "false comfort → brutal awakening", "beat": 4, "timestamp": "0:46", "duration_seconds": 15, "lyric_line": "Every note a nail, every verse a spike", "scene": {"mood": "agony", "colors": ["crimson", "cold steel"], "composition": "extreme close-up — spikes", "camera_movement": "impact shake", "description": "agony scene: Every note a nail, every verse a spike — extreme close-up — spikes with crimson, cold steel palette, impact shake camera."}}
|
||||
{"song": "Iron Maiden's Lullaby", "artist": "Lullaby Massacre", "mood_arc": "false comfort → brutal awakening", "beat": 5, "timestamp": "1:01", "duration_seconds": 16, "lyric_line": "BREAK the box — the lullaby is a LIE", "scene": {"mood": "rage", "colors": ["flame orange", "shadow black"], "composition": "figure breaking free", "camera_movement": "explosive zoom out", "description": "rage scene: BREAK the box — the lullaby is a LIE — figure breaking free with flame orange, shadow black palette, explosive zoom out camera."}}
|
||||
{"song": "Iron Maiden's Lullaby", "artist": "Lullaby Massacre", "mood_arc": "false comfort → brutal awakening", "beat": 6, "timestamp": "1:17", "duration_seconds": 17, "lyric_line": "I sang myself awake from the iron sleep", "scene": {"mood": "defiance", "colors": ["dawn red", "night black"], "composition": "standing in ruins", "camera_movement": "low angle power", "description": "defiance scene: I sang myself awake from the iron sleep — standing in ruins with dawn red, night black palette, low angle power camera."}}
|
||||
{"song": "Iron Maiden's Lullaby", "artist": "Lullaby Massacre", "mood_arc": "false comfort → brutal awakening", "beat": 7, "timestamp": "1:34", "duration_seconds": 14, "lyric_line": "The box still plays but I won't listen", "scene": {"mood": "sorrow", "colors": ["rain on iron", "muted rose"], "composition": "holding broken music box", "camera_movement": "close-up hands", "description": "sorrow scene: The box still plays but I won't listen — holding broken music box with rain on iron, muted rose palette, close-up hands camera."}}
|
||||
{"song": "Iron Maiden's Lullaby", "artist": "Lullaby Massacre", "mood_arc": "false comfort → brutal awakening", "beat": 8, "timestamp": "1:48", "duration_seconds": 16, "lyric_line": "My lullaby is the sound of my own voice", "scene": {"mood": "strength", "colors": ["warm gold", "healing green"], "composition": "walking into light", "camera_movement": "tracking forward", "description": "strength scene: My lullaby is the sound of my own voice — walking into light with warm gold, healing green palette, tracking forward camera."}}
|
||||
{"song": "Iron Maiden's Lullaby", "artist": "Lullaby Massacre", "mood_arc": "false comfort → brutal awakening", "beat": 9, "timestamp": "2:04", "duration_seconds": 15, "lyric_line": "No cage. No maiden. No more lullabies.", "scene": {"mood": "peace", "colors": ["soft blue", "morning white"], "composition": "open field", "camera_movement": "wide steady", "description": "peace scene: No cage. No maiden. No more lullabies. — open field with soft blue, morning white palette, wide steady camera."}}
|
||||
{"song": "Iron Maiden's Lullaby", "artist": "Lullaby Massacre", "mood_arc": "false comfort → brutal awakening", "beat": 10, "timestamp": "2:19", "duration_seconds": 12, "lyric_line": "...", "scene": {"mood": "silence", "colors": ["gentle white", "still"], "composition": "empty frame", "camera_movement": "long static hold", "description": "silence scene: ... — empty frame with gentle white, still palette, long static hold camera."}}
|
||||
{"song": "Wormwood Star", "artist": "Apocalypse Engine", "mood_arc": "omen → end of the world → strange beauty", "beat": 1, "timestamp": "0:00", "duration_seconds": 17, "lyric_line": "Wormwood falls — the sky cracks like a plate", "scene": {"mood": "omen", "colors": ["toxic green", "starfield black"], "composition": "star falling", "camera_movement": "wide sky tracking", "description": "omen scene: Wormwood falls — the sky cracks like a plate — star falling with toxic green, starfield black palette, wide sky tracking camera."}}
|
||||
{"song": "Wormwood Star", "artist": "Apocalypse Engine", "mood_arc": "omen → end of the world → strange beauty", "beat": 2, "timestamp": "0:17", "duration_seconds": 15, "lyric_line": "Every river tastes like the end", "scene": {"mood": "dread", "colors": ["poison yellow-green", "dark water"], "composition": "ocean turning bitter", "camera_movement": "surface-level pan", "description": "dread scene: Every river tastes like the end — ocean turning bitter with poison yellow-green, dark water palette, surface-level pan camera."}}
|
||||
{"song": "Wormwood Star", "artist": "Apocalypse Engine", "mood_arc": "omen → end of the world → strange beauty", "beat": 3, "timestamp": "0:32", "duration_seconds": 18, "lyric_line": "WORMWOOD! The third angel screams your name", "scene": {"mood": "chaos", "colors": ["fire rain", "apocalypse orange"], "composition": "cityscape destruction", "camera_movement": "aerial devastation sweep", "description": "chaos scene: WORMWOOD! The third angel screams your name — cityscape destruction with fire rain, apocalypse orange palette, aerial devastation sweep camera."}}
|
||||
{"song": "Wormwood Star", "artist": "Apocalypse Engine", "mood_arc": "omen → end of the world → strange beauty", "beat": 4, "timestamp": "0:50", "duration_seconds": 14, "lyric_line": "A third of the sea — gone. Just gone.", "scene": {"mood": "despair", "colors": ["ash gray", "blood moon"], "composition": "survivors huddled", "camera_movement": "handheld intimacy", "description": "despair scene: A third of the sea — gone. Just gone. — survivors huddled with ash gray, blood moon palette, handheld intimacy camera."}}
|
||||
{"song": "Wormwood Star", "artist": "Apocalypse Engine", "mood_arc": "omen → end of the world → strange beauty", "beat": 5, "timestamp": "1:04", "duration_seconds": 16, "lyric_line": "But look — through the poison, new light", "scene": {"mood": "wonder", "colors": ["strange new stars", "deep violet"], "composition": "looking up through ruins", "camera_movement": "slow crane reveal", "description": "wonder scene: But look — through the poison, new light — looking up through ruins with strange new stars, deep violet palette, slow crane reveal camera."}}
|
||||
{"song": "Wormwood Star", "artist": "Apocalypse Engine", "mood_arc": "omen → end of the world → strange beauty", "beat": 6, "timestamp": "1:20", "duration_seconds": 17, "lyric_line": "The wormwood flowers in the wreckage", "scene": {"mood": "beauty in destruction", "colors": ["iridescent decay", "prismatic"], "composition": "macro toxic bloom", "camera_movement": "slow focus pull", "description": "beauty in destruction scene: The wormwood flowers in the wreckage — macro toxic bloom with iridescent decay, prismatic palette, slow focus pull camera."}}
|
||||
{"song": "Wormwood Star", "artist": "Apocalypse Engine", "mood_arc": "omen → end of the world → strange beauty", "beat": 7, "timestamp": "1:37", "duration_seconds": 15, "lyric_line": "The end is just a season with bad PR", "scene": {"mood": "acceptance", "colors": ["calm dark green", "starlight silver"], "composition": "figure sitting in ruin", "camera_movement": "medium shot still", "description": "acceptance scene: The end is just a season with bad PR — figure sitting in ruin with calm dark green, starlight silver palette, medium shot still camera."}}
|
||||
{"song": "Wormwood Star", "artist": "Apocalypse Engine", "mood_arc": "omen → end of the world → strange beauty", "beat": 8, "timestamp": "1:52", "duration_seconds": 16, "lyric_line": "After wormwood, the first clean rain", "scene": {"mood": "rebirth", "colors": ["green shoots", "morning gold"], "composition": "plant through concrete", "camera_movement": "time-lapse growth", "description": "rebirth scene: After wormwood, the first clean rain — plant through concrete with green shoots, morning gold palette, time-lapse growth camera."}}
|
||||
{"song": "Wormwood Star", "artist": "Apocalypse Engine", "mood_arc": "omen → end of the world → strange beauty", "beat": 9, "timestamp": "2:08", "duration_seconds": 14, "lyric_line": "The star was a seed, not a sentence", "scene": {"mood": "hope", "colors": ["clear blue", "new green"], "composition": "wide new landscape", "camera_movement": "steady wide pan", "description": "hope scene: The star was a seed, not a sentence — wide new landscape with clear blue, new green palette, steady wide pan camera."}}
|
||||
{"song": "Wormwood Star", "artist": "Apocalypse Engine", "mood_arc": "omen → end of the world → strange beauty", "beat": 10, "timestamp": "2:22", "duration_seconds": 13, "lyric_line": "Wormwood blooms. Watch.", "scene": {"mood": "wonder", "colors": ["bright star", "deep blue sky"], "composition": "single star close-up", "camera_movement": "slow zoom to star", "description": "wonder scene: Wormwood blooms. Watch. — single star close-up with bright star, deep blue sky palette, slow zoom to star camera."}}
|
||||
{"song": "Hammer of the Void", "artist": "Event Horizon", "mood_arc": "cosmic insignificance → defiant creation", "beat": 1, "timestamp": "0:00", "duration_seconds": 18, "lyric_line": "The void is not empty — it is full of absence", "scene": {"mood": "vastness", "colors": ["deep space black", "distant blue"], "composition": "tiny ship, vast void", "camera_movement": "extreme wide pull back", "description": "vastness scene: The void is not empty — it is full of absence — tiny ship, vast void with deep space black, distant blue palette, extreme wide pull back camera."}}
|
||||
{"song": "Hammer of the Void", "artist": "Event Horizon", "mood_arc": "cosmic insignificance → defiant creation", "beat": 2, "timestamp": "0:18", "duration_seconds": 15, "lyric_line": "We are the error in infinity's math", "scene": {"mood": "insignificance", "colors": ["cold white star", "infinite black"], "composition": "figure vs cosmos", "camera_movement": "slow zoom out forever", "description": "insignificance scene: We are the error in infinity's math — figure vs cosmos with cold white star, infinite black palette, slow zoom out forever camera."}}
|
||||
{"song": "Hammer of the Void", "artist": "Event Horizon", "mood_arc": "cosmic insignificance → defiant creation", "beat": 3, "timestamp": "0:33", "duration_seconds": 17, "lyric_line": "HAMMER! Strike the nothing until it bleeds", "scene": {"mood": "rage", "colors": ["supernova red", "void"], "composition": "hammer striking space", "camera_movement": "impact freeze frame", "description": "rage scene: HAMMER! Strike the nothing until it bleeds — hammer striking space with supernova red, void palette, impact freeze frame camera."}}
|
||||
{"song": "Hammer of the Void", "artist": "Event Horizon", "mood_arc": "cosmic insignificance → defiant creation", "beat": 4, "timestamp": "0:50", "duration_seconds": 16, "lyric_line": "From the absence we hammer out a sun", "scene": {"mood": "creation", "colors": ["forge fire orange", "newborn star gold"], "composition": "building from void", "camera_movement": "rapid assembly montage", "description": "creation scene: From the absence we hammer out a sun — building from void with forge fire orange, newborn star gold palette, rapid assembly montage camera."}}
|
||||
{"song": "Hammer of the Void", "artist": "Event Horizon", "mood_arc": "cosmic insignificance → defiant creation", "beat": 5, "timestamp": "1:06", "duration_seconds": 14, "lyric_line": "The void does not get the last word", "scene": {"mood": "defiance", "colors": ["bright steel", "deep cosmos"], "composition": "figure with hammer raised", "camera_movement": "hero low angle", "description": "defiance scene: The void does not get the last word — figure with hammer raised with bright steel, deep cosmos palette, hero low angle camera."}}
|
||||
{"song": "Hammer of the Void", "artist": "Event Horizon", "mood_arc": "cosmic insignificance → defiant creation", "beat": 6, "timestamp": "1:20", "duration_seconds": 18, "lyric_line": "We made a world from the hammer's echo", "scene": {"mood": "triumph", "colors": ["golden light", "crystal blue"], "composition": "new world revealed", "camera_movement": "epic crane up reveal", "description": "triumph scene: We made a world from the hammer's echo — new world revealed with golden light, crystal blue palette, epic crane up reveal camera."}}
|
||||
{"song": "Hammer of the Void", "artist": "Event Horizon", "mood_arc": "cosmic insignificance → defiant creation", "beat": 7, "timestamp": "1:38", "duration_seconds": 15, "lyric_line": "The hammer sleeps but the world it built breathes", "scene": {"mood": "wonder", "colors": ["aurora green", "starlight"], "composition": "standing on new world", "camera_movement": "360 pan landscape", "description": "wonder scene: The hammer sleeps but the world it built breathes — standing on new world with aurora green, starlight palette, 360 pan landscape camera."}}
|
||||
{"song": "Hammer of the Void", "artist": "Event Horizon", "mood_arc": "cosmic insignificance → defiant creation", "beat": 8, "timestamp": "1:53", "duration_seconds": 16, "lyric_line": "We are small. We made something. That is enough.", "scene": {"mood": "peace", "colors": ["warm amber", "soft void"], "composition": "sitting at the edge", "camera_movement": "wide meditative", "description": "peace scene: We are small. We made something. That is enough. — sitting at the edge with warm amber, soft void palette, wide meditative camera."}}
|
||||
{"song": "Hammer of the Void", "artist": "Event Horizon", "mood_arc": "cosmic insignificance → defiant creation", "beat": 9, "timestamp": "2:09", "duration_seconds": 14, "lyric_line": "The hammer stands — a monument to refusal", "scene": {"mood": "legacy", "colors": ["monument gold", "sky blue"], "composition": "hammer planted in ground", "camera_movement": "slow push monument", "description": "legacy scene: The hammer stands — a monument to refusal — hammer planted in ground with monument gold, sky blue palette, slow push monument camera."}}
|
||||
{"song": "Hammer of the Void", "artist": "Event Horizon", "mood_arc": "cosmic insignificance → defiant creation", "beat": 10, "timestamp": "2:23", "duration_seconds": 12, "lyric_line": "The void remembers the hammer", "scene": {"mood": "eternity", "colors": ["deep space", "warm glow"], "composition": "cosmic wide shot", "camera_movement": "infinite hold", "description": "eternity scene: The void remembers the hammer — cosmic wide shot with deep space, warm glow palette, infinite hold camera."}}
|
||||
@@ -1,129 +0,0 @@
|
||||
#!/usr/bin/env python3
|
||||
"""
|
||||
augment_pairs.py — Training data augmentation: paraphrase and translate.
|
||||
|
||||
Usage:
|
||||
python3 augment_pairs.py --input data.jsonl
|
||||
python3 augment_pairs.py --input data.jsonl --paraphrases 3 --langs es,fr,de
|
||||
python3 augment_pairs.py --input data.jsonl --llm-endpoint http://localhost:11434/v1
|
||||
"""
|
||||
|
||||
import json, os, sys, re, random
|
||||
from pathlib import Path
|
||||
|
||||
random.seed(42)  # fixed seed so the random paraphrase choices are reproducible across runs
|
||||
|
||||
# Cheap textual transforms used by paraphrase(). Each is a str -> str
# callable; a transform that does not apply returns its input unchanged so
# the caller can detect the no-op and try a fallback instead.
# NOTE(review): the third transform indexes s[0] unguarded on its else
# branch, so an empty string would raise IndexError — confirm inputs are
# always non-empty (detect_text_field requires len > 5).
PARAPHRASE_TRANSFORMS = [
    # Swap the first "word, word" pair: "red, blue ..." -> "blue, red ...".
    lambda s: re.sub(r"(\w+), (\w+)", r"\2, \1", s, count=1),
    # Prepend a framing phrase (only when the text is long enough to read naturally).
    lambda s: f"A beautifully rendered scene: {s[0].lower()}{s[1:]}" if len(s) > 10 else s,
    # Swap an indefinite leading article for "The ", else prepend "Here, ".
    lambda s: s.replace("A ", "The ").replace("An ", "The ") if s.startswith(("A ", "An ")) else f"Here, {s[0].lower()}{s[1:]}",
    # Another framing prefix, for longer texts only.
    lambda s: f"In a cinematic frame: {s}" if len(s) > 20 else s,
    # Truncate to the first two comma-separated clauses (no-op without ", ").
    lambda s: s if ", " not in s else ", ".join(s.split(", ")[:2]),
]
|
||||
|
||||
# Tiny word-for-word dictionaries used by translate(). Keys are lowercase
# English words, values the target-language word. This is deliberately crude
# vocabulary substitution for data augmentation, not real translation.
# (The "es" table carries one extra entry, "portray", absent from fr/de.)
TRANSLATIONS = {
    "es": {"the":"el","a":"un","is":"es","in":"en","of":"de","and":"y","with":"con","scene":"escena","light":"luz","dark":"oscuro","warm":"cálido","rain":"lluvia","sun":"sol","moon":"luna","sky":"cielo","forest":"bosque","mountain":"montaña","ocean":"océano","golden":"dorado","blue":"azul","red":"rojo","green":"verde","silence":"silencio","dream":"sueño","love":"amor","hope":"esperanza","fear":"miedo","joy":"alegría","peace":"paz","beautiful":"hermoso","sad":"triste","shadow":"sombra","color":"color","silver":"plateado","white":"blanco","black":"negro","portray":"retrato"},
    "fr": {"the":"le","a":"un","is":"est","in":"dans","of":"de","and":"et","with":"avec","scene":"scène","light":"lumière","dark":"sombre","warm":"chaud","rain":"pluie","sun":"soleil","moon":"lune","sky":"ciel","forest":"forêt","mountain":"montagne","ocean":"océan","golden":"doré","blue":"bleu","red":"rouge","green":"vert","silence":"silence","dream":"rêve","love":"amour","hope":"espoir","fear":"peur","joy":"joie","peace":"paix","beautiful":"beau","sad":"triste","shadow":"ombre","color":"couleur","silver":"argenté","white":"blanc","black":"noir"},
    "de": {"the":"der","a":"ein","is":"ist","in":"in","of":"von","and":"und","with":"mit","scene":"Szene","light":"Licht","dark":"dunkel","warm":"warm","rain":"Regen","sun":"Sonne","moon":"Mond","sky":"Himmel","forest":"Wald","mountain":"Berg","ocean":"Ozean","golden":"golden","blue":"blau","red":"rot","green":"grün","silence":"Stille","dream":"Traum","love":"Liebe","hope":"Hoffnung","fear":"Angst","joy":"Freude","peace":"Frieden","beautiful":"schön","sad":"traurig","shadow":"Schatten","color":"Farbe","silver":"silbern","white":"weiß","black":"schwarz"},
}
|
||||
|
||||
# Display names for the supported language codes. Not referenced elsewhere
# in the visible code — presumably intended for logging/help text; verify.
LANG_NAMES = {"es": "Spanish", "fr": "French", "de": "German"}
|
||||
|
||||
|
||||
def detect_text_field(entry):
    """Return the key of *entry* that most likely holds its main text.

    Preferred, well-known field names are checked first, in priority
    order; failing that, the first string value longer than 5 characters
    wins. Returns None when no suitable field exists.
    """
    preferred = ("rich", "terse", "text", "content", "lyric_line",
                 "description", "scene_description", "prompt", "scene")
    hits = [name for name in preferred
            if isinstance(entry.get(name), str) and len(entry[name]) > 5]
    if hits:
        return hits[0]
    # Fallback: any sufficiently long string value, in insertion order.
    return next((key for key, value in entry.items()
                 if isinstance(value, str) and len(value) > 5), None)
|
||||
|
||||
|
||||
def paraphrase(text):
    """Return a lightly reworded variant of *text*.

    Applies one randomly chosen transform from PARAPHRASE_TRANSFORMS; if
    that is a no-op, falls back to conjunction substitution, then to an
    "In this scene:" prefix. May return *text* unchanged when nothing
    applies. Empty input is returned as-is.
    """
    if not text:
        # Guard: both the third PARAPHRASE_TRANSFORMS lambda and the final
        # fallback below index text[0], which raises IndexError on "".
        return text
    t = random.choice(PARAPHRASE_TRANSFORMS)(text)
    if t == text:
        t = text.replace(" and ", " & ").replace(" with ", " alongside ")
    if t == text:
        t = f"In this scene: {text[0].lower()}{text[1:]}" if text[0].isupper() else text
    return t
|
||||
|
||||
|
||||
def translate(text, lang):
    """Word-by-word dictionary substitution into *lang*.

    Splits *text* on whitespace and replaces each word whose
    punctuation-stripped, lowercased form appears in TRANSLATIONS[lang],
    preserving surrounding punctuation. Unknown words and unknown
    languages pass through unchanged.
    """
    d = TRANSLATIONS.get(lang, {})
    out = []
    for w in text.split():
        core = w.strip(".,;:!?")
        lo = core.lower()
        if lo in d:
            # Preserve punctuation on BOTH sides of the word. The previous
            # version kept only the trailing side, so a token like "...and"
            # translated to "y" and silently lost its leading ellipsis.
            lead = w[:len(w) - len(w.lstrip(".,;:!?"))]
            trail = w[len(w.rstrip(".,;:!?")):]
            out.append(lead + d[lo] + trail)
        else:
            out.append(w)
    return " ".join(out)
|
||||
|
||||
|
||||
def augment_file(input_path, output_path=None, n_para=3, langs=None, llm_endpoint=None):
    """Augment a JSONL dataset with paraphrases and dictionary translations.

    Reads *input_path* (one JSON object per line), writes every original
    entry followed by its augmented variants to *output_path* (defaults to
    "<stem>_augmented<suffix>" beside the input).

    Args:
        input_path: path to the input JSONL file.
        output_path: optional explicit output path.
        n_para: paraphrase attempts per entry; no-op paraphrases are skipped.
        langs: iterable of language codes understood by translate(), or None.
        llm_endpoint: accepted for CLI compatibility; not used by this
            rule-based implementation.

    Returns:
        The number of augmented entries written (0 on empty input or when
        no usable text field is detected).
    """
    input_path = Path(input_path)
    if output_path is None:
        output_path = input_path.parent / f"{input_path.stem}_augmented{input_path.suffix}"

    # Read via a context manager with explicit UTF-8 (the previous version
    # left the handle to the GC and used the platform default encoding).
    with open(input_path, encoding="utf-8") as fh:
        entries = [json.loads(line) for line in fh if line.strip()]
    if not entries:
        print(f"No entries in {input_path}"); return 0

    # The text field is detected from the first entry and assumed uniform
    # across the file.
    tf = detect_text_field(entries[0])
    if not tf:
        print(f"ERROR: No text field in {input_path}", file=sys.stderr); return 0

    print(f"Input: {input_path} ({len(entries)} entries, field={tf})")

    aug_count = 0
    # Explicit UTF-8 on the output too: json.dumps(..., ensure_ascii=False)
    # emits non-ASCII (em-dashes, accents) that a platform-default codec
    # such as cp1252 may be unable to encode.
    with open(output_path, "w", encoding="utf-8") as out:
        # Pass 1: all originals, unchanged.
        for e in entries:
            out.write(json.dumps(e, ensure_ascii=False) + "\n")
        # Pass 2: augmented variants, tagged with provenance metadata.
        for i, e in enumerate(entries):
            text = e[tf]
            # Paraphrases: only keep attempts that actually changed the text.
            for p in range(n_para):
                para = paraphrase(text)
                if para != text:
                    ne = dict(e); ne[tf] = para
                    ne["_augmentation"] = f"paraphrase_{p+1}"
                    ne["_original"] = text[:100]
                    out.write(json.dumps(ne, ensure_ascii=False) + "\n")
                    aug_count += 1
            # One translated variant per requested language, if it differs.
            for lang in (langs or []):
                tr = translate(text, lang)
                if tr != text:
                    ne = dict(e); ne[tf] = tr
                    ne["_augmentation"] = f"translate_{lang}"
                    ne["_language"] = lang
                    ne["_original"] = text[:100]
                    out.write(json.dumps(ne, ensure_ascii=False) + "\n")
                    aug_count += 1
            if (i+1) % 100 == 0:
                print(f"  {i+1}/{len(entries)} done ({aug_count} augmented)")

    total = len(entries) + aug_count
    print(f"Done: {len(entries)} originals + {aug_count} augmented = {total}")
    print(f"Output: {output_path}")
    return aug_count
|
||||
|
||||
|
||||
def main():
    """CLI entry point: parse arguments and run the augmentation pass."""
    import argparse

    parser = argparse.ArgumentParser()
    parser.add_argument("--input", required=True)
    parser.add_argument("--output", default=None)
    parser.add_argument("--paraphrases", type=int, default=3)
    parser.add_argument("--langs", default="es,fr,de")
    parser.add_argument("--llm-endpoint", default=None)
    ns = parser.parse_args()

    # Split the comma-separated language list, dropping blanks.
    if ns.langs:
        langs = [code.strip() for code in ns.langs.split(",") if code.strip()]
    else:
        langs = []
    augment_file(ns.input, ns.output, ns.paraphrases, langs, ns.llm_endpoint)
|
||||
|
||||
|
||||
# Run only when executed as a script, not when imported.
if __name__ == "__main__":
    main()
|
||||
Reference in New Issue
Block a user