2026-02-02 08:26:42 -08:00
|
|
|
"""
|
|
|
|
|
Cron job scheduler - executes due jobs.
|
|
|
|
|
|
2026-02-21 16:21:19 -08:00
|
|
|
Provides tick() which checks for due jobs and runs them. The gateway
|
|
|
|
|
calls this every 60 seconds from a background thread.
|
|
|
|
|
|
|
|
|
|
Uses a file-based lock (~/.hermes/cron/.tick.lock) so only one tick
|
|
|
|
|
runs at a time if multiple processes overlap.
|
2026-02-02 08:26:42 -08:00
|
|
|
"""
|
|
|
|
|
|
2026-02-22 17:14:44 -08:00
|
|
|
import asyncio
|
2026-02-21 03:11:11 -08:00
|
|
|
import logging
|
2026-02-02 08:26:42 -08:00
|
|
|
import os
|
|
|
|
|
import sys
|
|
|
|
|
import traceback
|
2026-02-25 16:27:40 -08:00
|
|
|
|
|
|
|
|
# fcntl is Unix-only; on Windows use msvcrt for file locking
|
|
|
|
|
try:
|
|
|
|
|
import fcntl
|
|
|
|
|
except ImportError:
|
|
|
|
|
fcntl = None
|
|
|
|
|
try:
|
|
|
|
|
import msvcrt
|
|
|
|
|
except ImportError:
|
|
|
|
|
msvcrt = None
|
2026-02-02 08:26:42 -08:00
|
|
|
from datetime import datetime
|
|
|
|
|
from pathlib import Path
|
|
|
|
|
from typing import Optional
|
|
|
|
|
|
2026-03-03 11:57:18 +05:30
|
|
|
from hermes_time import now as _hermes_now
|
|
|
|
|
|
2026-02-21 03:11:11 -08:00
|
|
|
logger = logging.getLogger(__name__)

# Add parent directory to path so sibling packages (cron, tools, gateway)
# are importable when this module runs as a standalone script.
sys.path.insert(0, str(Path(__file__).parent.parent))

from cron.jobs import get_due_jobs, mark_job_run, save_job_output

# Resolve Hermes home directory (respects HERMES_HOME override)
_hermes_home = Path(os.getenv("HERMES_HOME", Path.home() / ".hermes"))

# File-based lock prevents concurrent ticks from gateway + daemon + systemd timer
_LOCK_DIR = _hermes_home / "cron"
_LOCK_FILE = _LOCK_DIR / ".tick.lock"
|
|
|
|
|
|
2026-02-02 08:26:42 -08:00
|
|
|
|
2026-02-22 17:14:44 -08:00
|
|
|
def _resolve_origin(job: dict) -> Optional[dict]:
|
2026-03-11 09:15:34 +01:00
|
|
|
"""Extract origin info from a job, preserving any extra routing metadata."""
|
2026-02-22 17:14:44 -08:00
|
|
|
origin = job.get("origin")
|
|
|
|
|
if not origin:
|
|
|
|
|
return None
|
|
|
|
|
platform = origin.get("platform")
|
|
|
|
|
chat_id = origin.get("chat_id")
|
|
|
|
|
if platform and chat_id:
|
|
|
|
|
return origin
|
|
|
|
|
return None
|
|
|
|
|
|
|
|
|
|
|
2026-03-14 19:07:50 -07:00
|
|
|
def _resolve_delivery_target(job: dict) -> Optional[dict]:
    """Resolve where a cron job's output should be auto-delivered, if anywhere.

    Returns a dict with "platform", "chat_id" (stringified when taken from the
    origin) and "thread_id" keys, or None when the job is local-only or no
    concrete target can be determined.
    """
    deliver = job.get("deliver", "local")
    origin = _resolve_origin(job)

    # "local": output stays on disk only — nothing to deliver.
    if deliver == "local":
        return None

    # "origin": send back to whichever chat created the job.
    if deliver == "origin":
        if origin is None:
            return None
        return {
            "platform": origin["platform"],
            "chat_id": str(origin["chat_id"]),
            "thread_id": origin.get("thread_id"),
        }

    # "platform:chat_id": a fully explicit target.
    if ":" in deliver:
        explicit_platform, _, explicit_chat = deliver.partition(":")
        return {
            "platform": explicit_platform,
            "chat_id": explicit_chat,
            "thread_id": None,
        }

    # Bare platform name: prefer the origin chat when it lives on that
    # platform, otherwise fall back to the platform's home channel env var.
    if origin and origin.get("platform") == deliver:
        return {
            "platform": deliver,
            "chat_id": str(origin["chat_id"]),
            "thread_id": origin.get("thread_id"),
        }

    home_channel = os.getenv(f"{deliver.upper()}_HOME_CHANNEL", "")
    if not home_channel:
        return None
    return {
        "platform": deliver,
        "chat_id": home_channel,
        "thread_id": None,
    }
|
|
|
|
|
|
|
|
|
|
|
2026-02-22 17:14:44 -08:00
|
|
|
def _deliver_result(job: dict, content: str) -> None:
    """
    Deliver job output to the configured target (origin chat, specific platform, etc.).

    Uses the standalone platform send functions from send_message_tool so delivery
    works whether or not the gateway is running.

    Args:
        job: Cron job record; "deliver" and "origin" fields determine the target.
        content: Text to deliver.

    Failures are logged, never raised — delivery is best-effort.
    """
    target = _resolve_delivery_target(job)
    if not target:
        # Only warn when delivery was actually requested; "local" jobs
        # legitimately have no target.
        if job.get("deliver", "local") != "local":
            logger.warning(
                "Job '%s' deliver=%s but no concrete delivery target could be resolved",
                job["id"],
                job.get("deliver", "local"),
            )
        return

    platform_name = target["platform"]
    chat_id = target["chat_id"]
    thread_id = target.get("thread_id")

    # Imported lazily so this module loads even when gateway deps are absent.
    from tools.send_message_tool import _send_to_platform
    from gateway.config import load_gateway_config, Platform

    # Map the string platform name to the gateway's Platform enum.
    platform_map = {
        "telegram": Platform.TELEGRAM,
        "discord": Platform.DISCORD,
        "slack": Platform.SLACK,
        "whatsapp": Platform.WHATSAPP,
        "signal": Platform.SIGNAL,
        "email": Platform.EMAIL,
    }
    platform = platform_map.get(platform_name.lower())
    if not platform:
        logger.warning("Job '%s': unknown platform '%s' for delivery", job["id"], platform_name)
        return

    try:
        config = load_gateway_config()
    except Exception as e:
        logger.error("Job '%s': failed to load gateway config for delivery: %s", job["id"], e)
        return

    pconfig = config.platforms.get(platform)
    if not pconfig or not pconfig.enabled:
        logger.warning("Job '%s': platform '%s' not configured/enabled", job["id"], platform_name)
        return

    # Run the async send in a fresh event loop (safe from any thread)
    try:
        result = asyncio.run(_send_to_platform(platform, pconfig, chat_id, content, thread_id=thread_id))
    except RuntimeError:
        # asyncio.run() fails if there's already a running loop in this thread;
        # spin up a new thread to avoid that.
        import concurrent.futures

        with concurrent.futures.ThreadPoolExecutor(max_workers=1) as pool:
            future = pool.submit(asyncio.run, _send_to_platform(platform, pconfig, chat_id, content, thread_id=thread_id))
            result = future.result(timeout=30)
    except Exception as e:
        logger.error("Job '%s': delivery to %s:%s failed: %s", job["id"], platform_name, chat_id, e)
        return

    if result and result.get("error"):
        logger.error("Job '%s': delivery error: %s", job["id"], result["error"])
    else:
        logger.info("Job '%s': delivered to %s:%s", job["id"], platform_name, chat_id)
        # Mirror the delivered content into the target's gateway session
        try:
            from gateway.mirror import mirror_to_session

            mirror_to_session(platform_name, chat_id, content, source_label="cron", thread_id=thread_id)
        except Exception as e:
            # Mirroring is a nicety; never let it fail the delivery path.
            logger.warning("Job '%s': mirror_to_session failed: %s", job["id"], e)
|
2026-02-22 17:14:44 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def run_job(job: dict) -> tuple[bool, str, str, Optional[str]]:
    """
    Execute a single cron job.

    Spins up a fresh AIAgent for the job's prompt, re-reading .env and
    config.yaml each run so credential/model changes apply without a restart.
    Origin/delivery context is injected via environment variables for the
    duration of the run and cleaned up in the finally block.

    Returns:
        Tuple of (success, full_output_doc, final_response, error_message)
    """
    from run_agent import AIAgent

    # Initialize SQLite session store so cron job messages are persisted
    # and discoverable via session_search (same pattern as gateway/run.py).
    _session_db = None
    try:
        from hermes_state import SessionDB

        _session_db = SessionDB()
    except Exception as e:
        logger.debug("Job '%s': SQLite session store not available: %s", job.get("id", "?"), e)

    job_id = job["id"]
    job_name = job["name"]
    prompt = job["prompt"]
    origin = _resolve_origin(job)
    delivery_target = _resolve_delivery_target(job)

    logger.info("Running job '%s' (ID: %s)", job_name, job_id)
    logger.info("Prompt: %s", prompt[:100])

    # Inject origin context so the agent's send_message tool knows the chat
    if origin:
        os.environ["HERMES_SESSION_PLATFORM"] = origin["platform"]
        os.environ["HERMES_SESSION_CHAT_ID"] = str(origin["chat_id"])
        if origin.get("chat_name"):
            os.environ["HERMES_SESSION_CHAT_NAME"] = origin["chat_name"]
    # Advertise the auto-delivery target to the agent as well.
    if delivery_target:
        os.environ["HERMES_CRON_AUTO_DELIVER_PLATFORM"] = delivery_target["platform"]
        os.environ["HERMES_CRON_AUTO_DELIVER_CHAT_ID"] = str(delivery_target["chat_id"])
        if delivery_target.get("thread_id") is not None:
            os.environ["HERMES_CRON_AUTO_DELIVER_THREAD_ID"] = str(delivery_target["thread_id"])

    try:
        # Re-read .env and config.yaml fresh every run so provider/key
        # changes take effect without a gateway restart.
        from dotenv import load_dotenv

        try:
            load_dotenv(str(_hermes_home / ".env"), override=True, encoding="utf-8")
        except UnicodeDecodeError:
            # Fall back for .env files saved with a legacy encoding.
            load_dotenv(str(_hermes_home / ".env"), override=True, encoding="latin-1")

        model = os.getenv("HERMES_MODEL") or "anthropic/claude-opus-4.6"

        # Load config.yaml for model, reasoning, prefill, toolsets, provider routing
        _cfg = {}
        try:
            import yaml

            _cfg_path = str(_hermes_home / "config.yaml")
            if os.path.exists(_cfg_path):
                with open(_cfg_path) as _f:
                    _cfg = yaml.safe_load(_f) or {}
            # "model" may be a plain string or a dict with a "default" key.
            _model_cfg = _cfg.get("model", {})
            if isinstance(_model_cfg, str):
                model = _model_cfg
            elif isinstance(_model_cfg, dict):
                model = _model_cfg.get("default", model)
        except Exception as e:
            logger.warning("Job '%s': failed to load config.yaml, using defaults: %s", job_id, e)

        # Reasoning config from env or config.yaml
        reasoning_config = None
        effort = os.getenv("HERMES_REASONING_EFFORT", "")
        if not effort:
            effort = str(_cfg.get("agent", {}).get("reasoning_effort", "")).strip()
        if effort and effort.lower() != "none":
            valid = ("xhigh", "high", "medium", "low", "minimal")
            if effort.lower() in valid:
                reasoning_config = {"enabled": True, "effort": effort.lower()}
        elif effort.lower() == "none":
            reasoning_config = {"enabled": False}

        # Prefill messages from env or config.yaml
        prefill_messages = None
        prefill_file = os.getenv("HERMES_PREFILL_MESSAGES_FILE", "") or _cfg.get("prefill_messages_file", "")
        if prefill_file:
            import json as _json

            pfpath = Path(prefill_file).expanduser()
            if not pfpath.is_absolute():
                # Relative paths are resolved against the Hermes home dir.
                pfpath = _hermes_home / pfpath
            if pfpath.exists():
                try:
                    with open(pfpath, "r", encoding="utf-8") as _pf:
                        prefill_messages = _json.load(_pf)
                    # Only a JSON list of messages is valid prefill input.
                    if not isinstance(prefill_messages, list):
                        prefill_messages = None
                except Exception as e:
                    logger.warning("Job '%s': failed to parse prefill messages file '%s': %s", job_id, pfpath, e)
                    prefill_messages = None

        # Max iterations
        max_iterations = _cfg.get("agent", {}).get("max_turns") or _cfg.get("max_turns") or 90

        # Provider routing
        pr = _cfg.get("provider_routing", {})

        from hermes_cli.runtime_provider import (
            resolve_runtime_provider,
            format_runtime_provider_error,
        )

        try:
            runtime = resolve_runtime_provider(
                requested=os.getenv("HERMES_INFERENCE_PROVIDER"),
            )
        except Exception as exc:
            # Re-raise with a user-readable message; the outer handler turns
            # it into the job's failure report.
            message = format_runtime_provider_error(exc)
            raise RuntimeError(message) from exc

        agent = AIAgent(
            model=model,
            api_key=runtime.get("api_key"),
            base_url=runtime.get("base_url"),
            provider=runtime.get("provider"),
            api_mode=runtime.get("api_mode"),
            max_iterations=max_iterations,
            reasoning_config=reasoning_config,
            prefill_messages=prefill_messages,
            providers_allowed=pr.get("only"),
            providers_ignored=pr.get("ignore"),
            providers_order=pr.get("order"),
            provider_sort=pr.get("sort"),
            quiet_mode=True,
            platform="cron",
            session_id=f"cron_{job_id}_{_hermes_now().strftime('%Y%m%d_%H%M%S')}",
            session_db=_session_db,
        )

        result = agent.run_conversation(prompt)

        final_response = result.get("final_response", "")
        if not final_response:
            final_response = "(No response generated)"

        # Markdown report persisted via save_job_output by the caller.
        output = f"""# Cron Job: {job_name}

**Job ID:** {job_id}
**Run Time:** {_hermes_now().strftime('%Y-%m-%d %H:%M:%S')}
**Schedule:** {job.get('schedule_display', 'N/A')}

## Prompt

{prompt}

## Response

{final_response}
"""

        logger.info("Job '%s' completed successfully", job_name)
        return True, output, final_response, None

    except Exception as e:
        error_msg = f"{type(e).__name__}: {str(e)}"
        logger.error("Job '%s' failed: %s", job_name, error_msg)

        # Failure report mirrors the success report's structure.
        output = f"""# Cron Job: {job_name} (FAILED)

**Job ID:** {job_id}
**Run Time:** {_hermes_now().strftime('%Y-%m-%d %H:%M:%S')}
**Schedule:** {job.get('schedule_display', 'N/A')}

## Prompt

{prompt}

## Error

```
{error_msg}

{traceback.format_exc()}
```
"""
        return False, output, "", error_msg

    finally:
        # Clean up injected env vars so they don't leak to other jobs
        for key in (
            "HERMES_SESSION_PLATFORM",
            "HERMES_SESSION_CHAT_ID",
            "HERMES_SESSION_CHAT_NAME",
            "HERMES_CRON_AUTO_DELIVER_PLATFORM",
            "HERMES_CRON_AUTO_DELIVER_CHAT_ID",
            "HERMES_CRON_AUTO_DELIVER_THREAD_ID",
        ):
            os.environ.pop(key, None)
        if _session_db:
            try:
                _session_db.close()
            except Exception as e:
                logger.debug("Job '%s': failed to close SQLite session store: %s", job_id, e)
|
2026-02-02 08:26:42 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
def tick(verbose: bool = True) -> int:
    """
    Check and run all due jobs.

    Uses a file lock so only one tick runs at a time, even if the gateway's
    in-process ticker and a standalone daemon or manual tick overlap.

    Args:
        verbose: Whether to print status messages

    Returns:
        Number of jobs executed (0 if another tick is already running)
    """
    _LOCK_DIR.mkdir(parents=True, exist_ok=True)

    # Cross-platform file locking: fcntl on Unix, msvcrt on Windows
    lock_fd = None
    try:
        lock_fd = open(_LOCK_FILE, "w")
        if fcntl:
            fcntl.flock(lock_fd, fcntl.LOCK_EX | fcntl.LOCK_NB)
        elif msvcrt:
            msvcrt.locking(lock_fd.fileno(), msvcrt.LK_NBLCK, 1)
    except (OSError, IOError):
        # Another process holds the lock (or the lock file is unusable);
        # skip this tick rather than run concurrently.
        logger.debug("Tick skipped — another instance holds the lock")
        if lock_fd is not None:
            lock_fd.close()
        return 0

    try:
        due_jobs = get_due_jobs()

        if verbose and not due_jobs:
            logger.info("%s - No jobs due", _hermes_now().strftime('%H:%M:%S'))
            return 0

        if verbose:
            logger.info("%s - %s job(s) due", _hermes_now().strftime('%H:%M:%S'), len(due_jobs))

        executed = 0
        for job in due_jobs:
            try:
                success, output, final_response, error = run_job(job)

                output_file = save_job_output(job["id"], output)
                if verbose:
                    logger.info("Output saved to: %s", output_file)

                # Deliver the final response to the origin/target chat
                deliver_content = final_response if success else f"⚠️ Cron job '{job.get('name', job['id'])}' failed:\n{error}"
                if deliver_content:
                    try:
                        _deliver_result(job, deliver_content)
                    except Exception as de:
                        logger.error("Delivery failed for job %s: %s", job["id"], de)

                # Record the run even on failure so the schedule advances.
                mark_job_run(job["id"], success, error)
                executed += 1

            except Exception as e:
                logger.error("Error processing job %s: %s", job['id'], e)
                mark_job_run(job["id"], False, str(e))

        return executed

    finally:
        # Release the lock AND always close the descriptor. The close is in a
        # nested finally so that an error while unlocking (previously
        # unguarded on the fcntl path) cannot leak the open file handle.
        try:
            if fcntl:
                try:
                    fcntl.flock(lock_fd, fcntl.LOCK_UN)
                except (OSError, IOError):
                    pass
            elif msvcrt:
                try:
                    msvcrt.locking(lock_fd.fileno(), msvcrt.LK_UNLCK, 1)
                except (OSError, IOError):
                    pass
        finally:
            lock_fd.close()
|
2026-02-02 08:26:42 -08:00
|
|
|
|
|
|
|
|
|
|
|
|
|
if __name__ == "__main__":
    # Running the module directly performs a single manual tick.
    tick(verbose=True)
|