Files
the-nexus/experiments/profile_memory_usage.py
Timmy-Sprint 11c3520507 paper: add §4.6 memory profiling with measured 7.7KB/session data
- New experiment: profile_memory_usage.py (tracemalloc + RSS at 1-100 sessions)
- Results: 7.7 KB/session (23% under prior 10KB estimate)
- New paper section §4.6 with scaling table
- Updated §5.6 scalability with measured data instead of theory
- Version bump to 0.1.3-draft
2026-04-13 02:10:07 -04:00

168 lines
5.1 KiB
Python

#!/usr/bin/env python3
"""
Memory Profiling: Multi-User Bridge session overhead.
Measures:
1. Per-session memory footprint (RSS delta per user)
2. History window scaling (10, 50, 100 messages)
3. Total memory at 50 and 100 concurrent sessions
Usage:
python experiments/profile_memory_usage.py
"""
import gc
import json
import os
import sys
import tracemalloc
sys.path.insert(0, os.path.dirname(os.path.dirname(os.path.abspath(__file__))))
from nexus.multi_user_bridge import SessionManager, UserSession, CrisisState
def get_rss_mb():
    """Return this process's *peak* RSS (``ru_maxrss``) in MB.

    ``ru_maxrss`` units are platform-dependent: macOS reports bytes,
    Linux (and most other Unixes) report kilobytes.  The previous
    magnitude-based heuristic (``rss > 1024 * 1024`` => "bytes")
    misclassified any Linux process whose peak RSS exceeded 1 GB
    (1 GB in KB is exactly 1024*1024), so we branch on the platform
    instead.
    """
    import resource
    rss = resource.getrusage(resource.RUSAGE_SELF).ru_maxrss
    if sys.platform == "darwin":
        # macOS: ru_maxrss is in bytes.
        return rss / (1024 * 1024)
    # Linux/BSD: ru_maxrss is in kilobytes.
    return rss / 1024
def profile_session_creation():
    """Profile per-session memory cost at increasing session counts.

    For each scale (1..100 sessions), creates a fresh SessionManager,
    populates every session with 20 messages (the default history
    window), and records both tracemalloc allocations and the RSS
    delta.  Returns one result dict per tested scale.
    """
    measurements = []
    for count in (1, 5, 10, 20, 50, 100):
        gc.collect()
        tracemalloc.start()
        rss_start = get_rss_mb()

        manager = SessionManager(max_sessions=count + 10)
        for idx in range(count):
            session = manager.get_or_create(f"user_{idx}", f"User {idx}", "Tower")
            # Fill the default history window: 20 messages per user.
            for msg_idx in range(20):
                session.add_message("user", f"Test message {msg_idx} from user {idx}")

        traced_now, traced_peak = tracemalloc.get_traced_memory()
        tracemalloc.stop()
        rss_end = get_rss_mb()

        per_session = traced_now / count
        measurements.append({
            "sessions": count,
            "rss_mb_before": round(rss_start, 2),
            "rss_mb_after": round(rss_end, 2),
            "rss_delta_mb": round(rss_end - rss_start, 2),
            "tracemalloc_current_kb": round(traced_now / 1024, 1),
            "tracemalloc_peak_kb": round(traced_peak / 1024, 1),
            "per_session_bytes": round(per_session, 1),
            "per_session_kb": round(per_session / 1024, 2),
        })

        # Drop the manager so the next iteration starts from a clean slate.
        del manager
        gc.collect()
    return measurements
def profile_history_window():
    """Profile memory growth as the configured history window grows.

    Creates a single session per window size, writes `window` pairs of
    ~500-byte user/assistant messages, and reports tracemalloc usage
    plus the derived bytes-per-retained-message figure.
    """
    measurements = []
    for window_size in (10, 20, 50, 100, 200):
        gc.collect()
        tracemalloc.start()

        manager = SessionManager(max_sessions=100, history_window=window_size)
        session = manager.get_or_create("test_user", "Test", "Tower")
        for turn in range(window_size):
            # Simulate realistic message sizes (~500 bytes)
            session.add_message("user", f"Message {turn}: " + "x" * 450)
            session.add_message("assistant", f"Response {turn}: " + "y" * 450)

        traced_now, _traced_peak = tracemalloc.get_traced_memory()
        tracemalloc.stop()

        retained = len(session.message_history)
        per_message = traced_now / retained if retained else 0
        measurements.append({
            "configured_window": window_size,
            "actual_messages": retained,
            "tracemalloc_kb": round(traced_now / 1024, 1),
            "bytes_per_message": round(per_message, 1),
        })

        del manager
        gc.collect()
    return measurements
def profile_crisis_state():
    """Confirm that CrisisState instances carry negligible memory.

    Allocates 10,000 instances, runs one check() on each, and returns
    the total and per-instance tracemalloc footprint.
    """
    gc.collect()
    tracemalloc.start()

    n_states = 10000
    instances = [CrisisState() for _ in range(n_states)]
    for idx, state in enumerate(instances):
        state.check(f"message {idx}")

    traced_now, _ = tracemalloc.get_traced_memory()
    tracemalloc.stop()

    return {
        "states": n_states,
        "total_kb": round(traced_now / 1024, 1),
        "per_state_bytes": round(traced_now / n_states, 2),
    }
if __name__ == "__main__":
    print("═══ Memory Profiling: Multi-User Bridge ═══\n")

    # Test 1: per-session memory as the session count scales up.
    print("── Test 1: Per-session memory at scale ──")
    session_results = profile_session_creation()
    for row in session_results:
        print(f" {row['sessions']:>3} sessions: "
              f"RSS +{row['rss_delta_mb']:.1f} MB, "
              f"tracemalloc {row['tracemalloc_current_kb']:.0f} KB, "
              f"~{row['per_session_bytes']:.0f} B/session")
    print()

    # Test 2: memory growth versus the configured history window.
    print("── Test 2: History window scaling ──")
    window_results = profile_history_window()
    for row in window_results:
        print(f" Window {row['configured_window']:>3}: "
              f"{row['actual_messages']} msgs, "
              f"{row['tracemalloc_kb']:.1f} KB, "
              f"{row['bytes_per_message']:.0f} B/msg")
    print()

    # Test 3: sanity-check CrisisState per-instance overhead.
    print("── Test 3: CrisisState overhead ──")
    crisis = profile_crisis_state()
    print(f" 10,000 CrisisState instances: {crisis['total_kb']:.1f} KB "
          f"({crisis['per_state_bytes']:.2f} B each)")
    print()

    print("═══ Complete ═══")

    # Emit all results as machine-readable JSON for the paper pipeline.
    combined = {
        "session_scaling": session_results,
        "history_window": window_results,
        "crisis_state": crisis,
    }
    print("\n" + json.dumps(combined, indent=2))