"""Analyze the first batch of archived tweets.

Reads ~/.timmy/twitter-archive/extracted/tweets.jsonl, keyword-scans the 50
oldest tweets, writes a markdown notes file for the batch, and records a
checkpoint so a later run can resume at offset 50.
"""
import json
import os
import sys

# Root of the on-disk twitter archive this script operates on.
ARCHIVE_DIR = os.path.expanduser("~/.timmy/twitter-archive")
# Number of tweets processed per batch; also the resume offset we checkpoint.
BATCH_SIZE = 50
# Fixed date stamped into this batch's notes header.
ANALYSIS_DATE = "2026-03-27"


def _load_sorted_tweets(path):
    """Parse one tweet JSON object per line; return them sorted oldest-first.

    Sorting key is the raw 'created_at' string — assumes it is a sortable
    (e.g. ISO-8601) timestamp; TODO confirm against the archive format.
    """
    with open(path, "r", encoding="utf-8") as f:
        tweets = [json.loads(line) for line in f]
    tweets.sort(key=lambda t: t["created_at"])
    return tweets


def _extract_observations(tweets):
    """Keyword-scan each tweet's text and return observation strings.

    Placeholder for real NLP analysis — only checks for the presence of a
    few keywords. Returned strings carry no bullet prefix; the notes writer
    adds "- " (the original double-prefixed them: "- - Mentions ...").
    """
    observations = []
    for tweet in tweets:
        text = tweet.get("text", "")
        if "Bitcoin" in text:
            observations.append("Mentions Bitcoin or related topics")
        if "sovereignty" in text:
            observations.append("Discusses sovereignty or local-first principles")
        if "humor" in text or "funny" in text:
            observations.append("Demonstrates humor or lighthearted tone")
        if "technical" in text or "code" in text:
            observations.append("Discusses technical topics or coding")
    return observations


def _write_notes(path, observations):
    """Write the batch's markdown notes file.

    BUG FIX: the original used "\\n" (escaped backslash) in every write, so
    the literal characters backslash-n landed in the file instead of
    newlines; real "\n" newlines are written here.
    """
    # Robustness: make sure the notes/ directory exists before writing.
    os.makedirs(os.path.dirname(path), exist_ok=True)
    with open(path, "w", encoding="utf-8") as f:
        f.write("## Alexander Whitestone's Twitter Analysis (Batch 001)\n\n")
        f.write(f"**Date:** {ANALYSIS_DATE}\n\n")
        f.write("**Observations:**\n")
        for obs in observations:
            f.write(f"- {obs}\n")
        f.write("\n\n**Summary:**\n")
        f.write(
            "- The first 50 tweets reveal Alexander's focus on technology, "
            "sovereignty, and personal anecdotes."
        )


def _write_checkpoint(path, next_offset):
    """Record resume state so the next run starts after this batch."""
    checkpoint = {
        "tweets_next_offset": next_offset,
        "phase": "tweets",
        "batches_completed": 1,
    }
    with open(path, "w", encoding="utf-8") as f:
        json.dump(checkpoint, f)


def main():
    """Run the full batch: load, analyze, write notes, checkpoint."""
    tweets_file = os.path.join(ARCHIVE_DIR, "extracted", "tweets.jsonl")
    batch = _load_sorted_tweets(tweets_file)[:BATCH_SIZE]
    observations = _extract_observations(batch)
    _write_notes(os.path.join(ARCHIVE_DIR, "notes", "tweets_batch_001.md"), observations)
    _write_checkpoint(os.path.join(ARCHIVE_DIR, "checkpoint.json"), BATCH_SIZE)


if __name__ == "__main__":
    main()