Compare commits

...

1 Commits

Author SHA1 Message Date
ee84cc2b09 fix: extract hardcoded values to config, clean up bare pass statements
Fixes #776 — Extract hardcoded PRAGMA busy_timeout=5000 to config
  - Add db_busy_timeout_ms setting (default 5000) to Settings
  - Replace hardcoded values in memory_system.py (2 locations)
  - Replace hardcoded value in memory/unified.py (1 location)

Fixes #778 — Remove bare pass after logged exceptions in tools.py
  - Remove redundant pass after logger.warning in consult_grok()
  - Remove redundant pass after logger.warning in Lightning invoice block
  - Replace bare pass in _merge_catalog with logger.debug

Fixes #782 — Extract hardcoded sats limit in consult_grok()
  - Add grok_sats_hard_cap setting (default 100) to Settings
  - Replace magic number 100 with settings.grok_sats_hard_cap
  - Add _INVOICE_MEMO_MAX_LEN constant for query truncation
2026-03-21 21:45:40 +00:00
4 changed files with 16 additions and 8 deletions

View File

@@ -87,8 +87,12 @@ class Settings(BaseSettings):
     xai_base_url: str = "https://api.x.ai/v1"
     grok_default_model: str = "grok-3-fast"
     grok_max_sats_per_query: int = 200
+    grok_sats_hard_cap: int = 100  # Absolute ceiling on sats per Grok query
     grok_free: bool = False  # Skip Lightning invoice when user has own API key

+    # ── Database ──────────────────────────────────────────────────────────
+    db_busy_timeout_ms: int = 5000  # SQLite PRAGMA busy_timeout (ms)
+
     # ── Claude (Anthropic) — cloud fallback backend ────────────────────────
     # Used when Ollama is offline and local inference isn't available.
     # Set ANTHROPIC_API_KEY to enable. Default model is Haiku (fast + cheap).

View File

@@ -14,6 +14,8 @@ from dataclasses import dataclass, field
 from datetime import UTC, datetime
 from pathlib import Path

+from config import settings
+
 logger = logging.getLogger(__name__)

 # Paths
@@ -28,7 +30,7 @@ def get_connection() -> Generator[sqlite3.Connection, None, None]:
     with closing(sqlite3.connect(str(DB_PATH))) as conn:
         conn.row_factory = sqlite3.Row
         conn.execute("PRAGMA journal_mode=WAL")
-        conn.execute("PRAGMA busy_timeout=5000")
+        conn.execute(f"PRAGMA busy_timeout={settings.db_busy_timeout_ms}")
         _ensure_schema(conn)
         yield conn

View File

@@ -20,6 +20,7 @@ from dataclasses import dataclass, field
 from datetime import UTC, datetime, timedelta
 from pathlib import Path

+from config import settings
 from timmy.memory.embeddings import (
     EMBEDDING_DIM,
     EMBEDDING_MODEL,  # noqa: F401 — re-exported for backward compatibility
@@ -111,7 +112,7 @@ def get_connection() -> Generator[sqlite3.Connection, None, None]:
     with closing(sqlite3.connect(str(DB_PATH))) as conn:
         conn.row_factory = sqlite3.Row
         conn.execute("PRAGMA journal_mode=WAL")
-        conn.execute("PRAGMA busy_timeout=5000")
+        conn.execute(f"PRAGMA busy_timeout={settings.db_busy_timeout_ms}")
         _ensure_schema(conn)
         yield conn
@@ -949,7 +950,7 @@ class SemanticMemory:
         with closing(sqlite3.connect(str(self.db_path))) as conn:
             conn.row_factory = sqlite3.Row
             conn.execute("PRAGMA journal_mode=WAL")
-            conn.execute("PRAGMA busy_timeout=5000")
+            conn.execute(f"PRAGMA busy_timeout={settings.db_busy_timeout_ms}")
             # Ensure schema exists
             conn.execute("""
                 CREATE TABLE IF NOT EXISTS memories (

View File

@@ -24,6 +24,9 @@ from config import settings
 logger = logging.getLogger(__name__)

+# Max characters of user query included in Lightning invoice memo
+_INVOICE_MEMO_MAX_LEN = 50
+
 # Lazy imports to handle test mocking
 _ImportError = None
 try:
@@ -447,7 +450,6 @@ def consult_grok(query: str) -> str:
         )
     except (ImportError, AttributeError) as exc:
         logger.warning("Tool execution failed (consult_grok logging): %s", exc)
-        pass

     # Generate Lightning invoice for monetization (unless free mode)
     invoice_info = ""
@@ -456,12 +458,11 @@ def consult_grok(query: str) -> str:
         from lightning.factory import get_backend as get_ln_backend

         ln = get_ln_backend()
-        sats = min(settings.grok_max_sats_per_query, 100)
-        inv = ln.create_invoice(sats, f"Grok query: {query[:50]}")
+        sats = min(settings.grok_max_sats_per_query, settings.grok_sats_hard_cap)
+        inv = ln.create_invoice(sats, f"Grok query: {query[:_INVOICE_MEMO_MAX_LEN]}")
         invoice_info = f"\n[Lightning invoice: {sats} sats — {inv.payment_request[:40]}...]"
     except (ImportError, OSError, ValueError) as exc:
         logger.warning("Tool execution failed (Lightning invoice): %s", exc)
-        pass

     result = backend.run(query)
@@ -940,7 +941,7 @@ def _merge_catalog(
                 "available_in": available_in,
             }
         except ImportError:
-            pass
+            logger.debug("Optional catalog %s.%s not available", module_path, attr_name)

 def get_all_available_tools() -> dict[str, dict]: