From ee84cc2b0926c8c8d44654a65eaeabc3d5c42ed3 Mon Sep 17 00:00:00 2001
From: Perplexity Computer
Date: Sat, 21 Mar 2026 21:45:40 +0000
Subject: [PATCH] fix: extract hardcoded values to config, clean up bare pass
 statements
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Fixes #776 — Extract hardcoded PRAGMA busy_timeout=5000 to config

- Add db_busy_timeout_ms setting (default 5000) to Settings
- Replace hardcoded values in memory_system.py (2 locations)
- Replace hardcoded value in memory/unified.py (1 location)

Fixes #778 — Remove bare pass after logged exceptions in tools.py

- Remove redundant pass after logger.warning in consult_grok()
- Remove redundant pass after logger.warning in Lightning invoice block
- Replace bare pass in _merge_catalog with logger.debug

Fixes #782 — Extract hardcoded sats limit in consult_grok()

- Add grok_sats_hard_cap setting (default 100) to Settings
- Replace magic number 100 with settings.grok_sats_hard_cap
- Add _INVOICE_MEMO_MAX_LEN constant for query truncation
---
 src/config.py               |  4 ++++
 src/timmy/memory/unified.py |  4 +++-
 src/timmy/memory_system.py  |  5 +++--
 src/timmy/tools.py          | 11 ++++++-----
 4 files changed, 16 insertions(+), 8 deletions(-)

diff --git a/src/config.py b/src/config.py
index f43be92..62b4680 100644
--- a/src/config.py
+++ b/src/config.py
@@ -87,8 +87,12 @@ class Settings(BaseSettings):
     xai_base_url: str = "https://api.x.ai/v1"
     grok_default_model: str = "grok-3-fast"
     grok_max_sats_per_query: int = 200
+    grok_sats_hard_cap: int = 100  # Absolute ceiling on sats per Grok query
     grok_free: bool = False  # Skip Lightning invoice when user has own API key
 
+    # ── Database ──────────────────────────────────────────────────────────
+    db_busy_timeout_ms: int = 5000  # SQLite PRAGMA busy_timeout (ms)
+
     # ── Claude (Anthropic) — cloud fallback backend ────────────────────────
     # Used when Ollama is offline and local inference isn't available.
     # Set ANTHROPIC_API_KEY to enable. Default model is Haiku (fast + cheap).
diff --git a/src/timmy/memory/unified.py b/src/timmy/memory/unified.py
index 97fd888..407c275 100644
--- a/src/timmy/memory/unified.py
+++ b/src/timmy/memory/unified.py
@@ -14,6 +14,8 @@ from dataclasses import dataclass, field
 from datetime import UTC, datetime
 from pathlib import Path
 
+from config import settings
+
 logger = logging.getLogger(__name__)
 
 # Paths
@@ -28,7 +30,7 @@ def get_connection() -> Generator[sqlite3.Connection, None, None]:
     with closing(sqlite3.connect(str(DB_PATH))) as conn:
         conn.row_factory = sqlite3.Row
         conn.execute("PRAGMA journal_mode=WAL")
-        conn.execute("PRAGMA busy_timeout=5000")
+        conn.execute(f"PRAGMA busy_timeout={settings.db_busy_timeout_ms}")
         _ensure_schema(conn)
         yield conn
 
diff --git a/src/timmy/memory_system.py b/src/timmy/memory_system.py
index 76ddd5c..c3d2d14 100644
--- a/src/timmy/memory_system.py
+++ b/src/timmy/memory_system.py
@@ -20,6 +20,7 @@ from dataclasses import dataclass, field
 from datetime import UTC, datetime, timedelta
 from pathlib import Path
 
+from config import settings
 from timmy.memory.embeddings import (
     EMBEDDING_DIM,
     EMBEDDING_MODEL,  # noqa: F401 — re-exported for backward compatibility
@@ -111,7 +112,7 @@ def get_connection() -> Generator[sqlite3.Connection, None, None]:
     with closing(sqlite3.connect(str(DB_PATH))) as conn:
         conn.row_factory = sqlite3.Row
         conn.execute("PRAGMA journal_mode=WAL")
-        conn.execute("PRAGMA busy_timeout=5000")
+        conn.execute(f"PRAGMA busy_timeout={settings.db_busy_timeout_ms}")
         _ensure_schema(conn)
         yield conn
 
@@ -949,7 +950,7 @@ class SemanticMemory:
         with closing(sqlite3.connect(str(self.db_path))) as conn:
             conn.row_factory = sqlite3.Row
             conn.execute("PRAGMA journal_mode=WAL")
-            conn.execute("PRAGMA busy_timeout=5000")
+            conn.execute(f"PRAGMA busy_timeout={settings.db_busy_timeout_ms}")
             # Ensure schema exists
             conn.execute("""
                 CREATE TABLE IF NOT EXISTS memories (
diff --git a/src/timmy/tools.py b/src/timmy/tools.py
index 38ca74b..3700909 100644
--- a/src/timmy/tools.py
+++ b/src/timmy/tools.py
@@ -24,6 +24,9 @@ from config import settings
 
 logger = logging.getLogger(__name__)
 
+# Max characters of user query included in Lightning invoice memo
+_INVOICE_MEMO_MAX_LEN = 50
+
 # Lazy imports to handle test mocking
 _ImportError = None
 try:
@@ -447,7 +450,6 @@ def consult_grok(query: str) -> str:
         )
     except (ImportError, AttributeError) as exc:
         logger.warning("Tool execution failed (consult_grok logging): %s", exc)
-        pass
 
     # Generate Lightning invoice for monetization (unless free mode)
     invoice_info = ""
@@ -456,12 +458,11 @@ def consult_grok(query: str) -> str:
             from lightning.factory import get_backend as get_ln_backend
 
             ln = get_ln_backend()
-            sats = min(settings.grok_max_sats_per_query, 100)
-            inv = ln.create_invoice(sats, f"Grok query: {query[:50]}")
+            sats = min(settings.grok_max_sats_per_query, settings.grok_sats_hard_cap)
+            inv = ln.create_invoice(sats, f"Grok query: {query[:_INVOICE_MEMO_MAX_LEN]}")
             invoice_info = f"\n[Lightning invoice: {sats} sats — {inv.payment_request[:40]}...]"
         except (ImportError, OSError, ValueError) as exc:
             logger.warning("Tool execution failed (Lightning invoice): %s", exc)
-            pass
 
     result = backend.run(query)
 
@@ -940,7 +941,7 @@ def _merge_catalog(
                 "available_in": available_in,
             }
         except ImportError:
-            pass
+            logger.debug("Optional catalog %s.%s not available", module_path, attr_name)
 
 
 def get_all_available_tools() -> dict[str, dict]: