Compare commits
2 Commits: queue/375-...fix/457-ss

| Author | SHA1 | Date |
|---|---|---|
| | 66b0febdfb | |
| | 6d79bf7783 | |
```diff
@@ -182,6 +182,15 @@ _SCRIPT_FAILURE_PHRASES = (
     "exit status",
     "non-zero exit",
     "did not complete",
+    # SSH-specific failure patterns (#350)
+    "no such file or directory",
+    "command not found",
+    "hermes binary not found",
+    "hermes not found",
+    "ssh: connect to host",
+    "connection timed out",
+    "host key verification failed",
+    "no route to host",
     "could not run",
     "unable to execute",
     "permission denied",
```
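The code that consumes this phrase table is outside the hunk, but such lists are typically applied as a case-insensitive substring scan over a script's captured output. A minimal sketch under that assumption; the helper name `looks_like_script_failure` is hypothetical, and only the tuple contents come from the hunk above:

```python
# Hypothetical sketch of how the phrase table is presumably consulted;
# the helper name is an assumption. Tuple abridged from the hunk above.
_SCRIPT_FAILURE_PHRASES = (
    "exit status",
    "non-zero exit",
    # SSH-specific failure patterns (#350)
    "no such file or directory",
    "ssh: connect to host",
    "permission denied",
)

def looks_like_script_failure(output: str) -> bool:
    """Return True if any known failure phrase appears in the output."""
    lowered = output.lower()
    return any(phrase in lowered for phrase in _SCRIPT_FAILURE_PHRASES)

# An SSH connection error in captured output now counts as a failure:
assert looks_like_script_failure(
    "ssh: connect to host ezra port 22: Connection refused"
)
```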
**cron/ssh_dispatch.py** (new file, 212 lines)

```python
"""
SSH dispatch utilities for cron jobs.

Provides validated remote execution so broken hermes binary paths
are caught before draining the dispatch queue.

Usage:
    from cron.ssh_dispatch import SSHEnvironment, format_dispatch_report

    ssh = SSHEnvironment(host="root@ezra", agent="allegro")
    result = ssh.dispatch("cron tick")
    if not result.success:
        print(result.failure_reason)
"""

import subprocess
import time
from dataclasses import dataclass, field
from typing import List, Optional


@dataclass
class DispatchResult:
    """Structured result of a remote command dispatch."""
    host: str
    command: str
    success: bool
    exit_code: Optional[int] = None
    stdout: str = ""
    stderr: str = ""
    failure_reason: Optional[str] = None
    duration_s: float = 0.0


@dataclass
class SSHEnvironment:
    """Validates and dispatches commands to a remote host via SSH."""

    host: str                          # e.g. "root@ezra" or "192.168.1.10"
    agent: str = ""                    # agent name for logging
    hermes_path: Optional[str] = None  # explicit path, auto-detected if None
    timeout: int = 120                 # seconds
    _validated_path: Optional[str] = field(default=None, init=False, repr=False)

    def _ssh_base(self) -> List[str]:
        return [
            "ssh",
            "-o", "ConnectTimeout=10",
            "-o", "StrictHostKeyChecking=accept-new",
            "-o", "BatchMode=yes",
            self.host,
        ]

    def _probe_remote_binary(self, candidate: str) -> bool:
        """Check if a hermes binary exists and is executable on the remote host."""
        try:
            result = subprocess.run(
                self._ssh_base() + [f"test -x {candidate}"],
                capture_output=True, timeout=15,
            )
            return result.returncode == 0
        except (subprocess.TimeoutExpired, FileNotFoundError):
            return False

    def detect_hermes_binary(self) -> Optional[str]:
        """Find a working hermes binary on the remote host."""
        if self._validated_path:
            return self._validated_path

        candidates = []
        if self.hermes_path:
            candidates.append(self.hermes_path)

        # Common locations
        candidates.extend([
            "hermes",  # on PATH
            "~/.local/bin/hermes",
            "/usr/local/bin/hermes",
            f"~/wizards/{self.agent}/venv/bin/hermes" if self.agent else "",
            f"/root/wizards/{self.agent}/venv/bin/hermes" if self.agent else "",
        ])
        candidates = [c for c in candidates if c]

        for candidate in candidates:
            if self._probe_remote_binary(candidate):
                self._validated_path = candidate
                return candidate

        return None

    def dispatch(self, command: str, *, validate_binary: bool = True) -> DispatchResult:
        """Execute a command on the remote host."""
        start = time.monotonic()

        if validate_binary:
            binary = self.detect_hermes_binary()
            if not binary:
                return DispatchResult(
                    host=self.host,
                    command=command,
                    success=False,
                    failure_reason=f"No working hermes binary found on {self.host}",
                    duration_s=time.monotonic() - start,
                )

        try:
            result = subprocess.run(
                self._ssh_base() + [command],
                capture_output=True,
                timeout=self.timeout,
            )
            duration = time.monotonic() - start
            stdout = result.stdout.decode("utf-8", errors="replace")
            stderr = result.stderr.decode("utf-8", errors="replace")

            failure_reason = None
            if result.returncode != 0:
                failure_reason = _classify_ssh_error(stderr, result.returncode)

            return DispatchResult(
                host=self.host,
                command=command,
                success=result.returncode == 0,
                exit_code=result.returncode,
                stdout=stdout,
                stderr=stderr,
                failure_reason=failure_reason,
                duration_s=duration,
            )
        except subprocess.TimeoutExpired:
            return DispatchResult(
                host=self.host,
                command=command,
                success=False,
                failure_reason=f"SSH command timed out after {self.timeout}s",
                duration_s=time.monotonic() - start,
            )
        except FileNotFoundError:
            return DispatchResult(
                host=self.host,
                command=command,
                success=False,
                failure_reason="ssh binary not found on local system",
                duration_s=time.monotonic() - start,
            )


def _classify_ssh_error(stderr: str, exit_code: int) -> str:
    """Classify an SSH error from stderr and exit code."""
    lower = stderr.lower()

    if "no such file or directory" in lower:
        return f"Remote binary or file not found (exit {exit_code})"
    if "command not found" in lower:
        return f"Command not found on remote host (exit {exit_code})"
    if "permission denied" in lower:
        return f"Permission denied (exit {exit_code})"
    if "connection timed out" in lower or "connection refused" in lower:
        return f"SSH connection failed (exit {exit_code})"
    if "host key verification failed" in lower:
        return f"Host key verification failed (exit {exit_code})"
    if "no route to host" in lower:
        return f"No route to host (exit {exit_code})"
    if exit_code == 127:
        return "Command not found (exit 127)"
    if exit_code == 126:
        return "Command not executable (exit 126)"

    return f"Command failed with exit code {exit_code}: {stderr[:200]}"


def dispatch_to_hosts(
    hosts: List[str],
    command: str,
    agent: str = "",
    timeout: int = 120,
) -> List[DispatchResult]:
    """Dispatch a command to multiple hosts and return results."""
    results = []
    for host in hosts:
        ssh = SSHEnvironment(host=host, agent=agent, timeout=timeout)
        result = ssh.dispatch(command)
        results.append(result)
    return results


def format_dispatch_report(results: List[DispatchResult]) -> str:
    """Format a human-readable report of dispatch results."""
    lines = ["## Dispatch Report", ""]

    succeeded = [r for r in results if r.success]
    failed = [r for r in results if not r.success]

    lines.append(f"**Total:** {len(results)} hosts | "
                 f"**OK:** {len(succeeded)} | **Failed:** {len(failed)}")
    lines.append("")

    for r in results:
        status = "OK" if r.success else "FAIL"
        lines.append(f"### {r.host} [{status}]")
        lines.append(f"- Command: `{r.command}`")
        lines.append(f"- Duration: {r.duration_s:.1f}s")
        if r.exit_code is not None:
            lines.append(f"- Exit code: {r.exit_code}")
        if r.failure_reason:
            lines.append(f"- **Failure:** {r.failure_reason}")
        if r.stderr and not r.success:
            lines.append(f"- Stderr: `{r.stderr[:300]}`")
        lines.append("")

    return "\n".join(lines)
```
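A short usage sketch for the multi-host helpers defined above. `root@ezra` and `allegro` come from the module docstring; the second host and the exit-status handling are illustrative assumptions, not part of the changeset:

```python
# Illustrative driver for dispatch_to_hosts / format_dispatch_report above;
# "root@backup" is a made-up placeholder host.
from cron.ssh_dispatch import dispatch_to_hosts, format_dispatch_report

results = dispatch_to_hosts(
    ["root@ezra", "root@backup"],
    "cron tick",
    agent="allegro",
    timeout=60,
)
print(format_dispatch_report(results))

# Surface failures to the calling cron wrapper via the exit status.
raise SystemExit(0 if all(r.success for r in results) else 1)
```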
**deploy-crons.py** (353 lines changed)

```diff
@@ -1,292 +1,153 @@
 #!/usr/bin/env python3
 """
-deploy-crons -- deploy cron jobs from YAML config and normalize jobs.json.
+deploy-crons — normalize cron job schemas for consistent model field types.
 
-Two modes:
-  --deploy     Sync jobs from cron-jobs.yaml into jobs.json (create / update).
-  --normalize  Normalize model field types in existing jobs.json.
-
-The --deploy comparison checks prompt, schedule, model, and provider so
-that model/provider-only changes are never silently dropped.
+This script ensures that the model field in jobs.json is always a dict when
+either model or provider is specified, preventing schema inconsistency.
 
 Usage:
-    python deploy-crons.py --deploy [--config PATH] [--jobs-file PATH] [--dry-run]
-    python deploy-crons.py --normalize [--jobs-file PATH] [--dry-run]
+    python deploy-crons.py [--dry-run] [--jobs-file PATH]
 """
 
 import argparse
 import json
 import sys
-import uuid
 from pathlib import Path
-from typing import Any, Dict, List, Optional
-
-try:
-    import yaml
-    HAS_YAML = True
-except ImportError:
-    HAS_YAML = False
-
-
-# ---------------------------------------------------------------------------
-# Helpers
-# ---------------------------------------------------------------------------
-
-def _flat_model(job: Dict[str, Any]) -> Optional[str]:
-    """Extract flat model string from dict or string model field."""
-    m = job.get("model")
-    if isinstance(m, dict):
-        return m.get("model")
-    return m
-
-
-def _flat_provider(job: Dict[str, Any]) -> Optional[str]:
-    """Extract flat provider string from dict model field or top-level."""
-    m = job.get("model")
-    if isinstance(m, dict):
-        return m.get("provider")
-    return job.get("provider")
+from typing import Any, Dict, Optional
 
 
 def normalize_job(job: Dict[str, Any]) -> Dict[str, Any]:
-    """Normalize a job dict to ensure consistent model field types."""
-    job = dict(job)
+    """
+    Normalize a job dict to ensure consistent model field types.
+
+    Before normalization:
+    - If model AND provider: model = raw string, provider = raw string (inconsistent)
+    - If only model: model = raw string
+    - If only provider: provider = raw string at top level
+
+    After normalization:
+    - If model exists: model = {"model": "xxx"}
+    - If provider exists: model = {"provider": "yyy"}
+    - If both exist: model = {"model": "xxx", "provider": "yyy"}
+    - If neither: model = None
+    """
+    job = dict(job)  # Create a copy to avoid modifying the original
 
     model = job.get("model")
     provider = job.get("provider")
 
     # Skip if already normalized (model is a dict)
     if isinstance(model, dict):
         return job
 
     # Build normalized model dict
     model_dict = {}
 
     if model is not None and isinstance(model, str):
         model_dict["model"] = model.strip()
 
     if provider is not None and isinstance(provider, str):
         model_dict["provider"] = provider.strip()
 
-    job["model"] = model_dict if model_dict else None
+    # Set model field
+    if model_dict:
+        job["model"] = model_dict
+    else:
+        job["model"] = None
+
+    # Remove top-level provider field if it was moved into model dict
+    if provider is not None and "provider" in model_dict:
+        # Keep provider field for backward compatibility but mark it as deprecated
+        # This allows existing code that reads job["provider"] to continue working
+        pass
 
     return job
```
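The mapping in the docstring can be checked directly; the job dicts and model names below are made-up, and the old and new bodies of `normalize_job` agree on all four cases:

```python
# Made-up jobs illustrating the normalization mapping described above.
legacy = {"id": "abc123", "model": "gpt-4o", "provider": "openai"}
normalized = normalize_job(legacy)
assert normalized["model"] == {"model": "gpt-4o", "provider": "openai"}
assert normalized["provider"] == "openai"  # kept for backward compatibility

# Only a provider: it moves under the model dict.
assert normalize_job({"provider": "openai"})["model"] == {"provider": "openai"}

# Neither field: model becomes an explicit None.
assert normalize_job({})["model"] is None

# Already-normalized jobs pass through unchanged.
assert normalize_job(normalized)["model"] == {"model": "gpt-4o", "provider": "openai"}
```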
```diff
-# ---------------------------------------------------------------------------
-# Deploy from YAML
-# ---------------------------------------------------------------------------
-
-def _jobs_changed(cur: Dict[str, Any], desired: Dict[str, Any]) -> bool:
-    """
-    Return True if desired differs from cur.
-
-    Compares prompt, schedule, model, and provider -- the fix for #375.
-    Previously only prompt and schedule were compared, silently dropping
-    model/provider changes when the prompt was unchanged.
-    """
-    if cur.get("prompt") != desired.get("prompt"):
-        return True
-    if cur.get("schedule") != desired.get("schedule"):
-        return True
-    if _flat_model(cur) != _flat_model(desired):
-        return True
-    if _flat_provider(cur) != _flat_provider(desired):
-        return True
-    return False
```
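The #375 regression described in the docstring is easiest to see with a model-only change; a sketch using the removed helpers above, with made-up job values:

```python
# Same prompt and schedule, different model (values are made up).
cur = {
    "prompt": "daily report",
    "schedule": {"kind": "interval", "minutes": 1440},
    "model": {"model": "haiku"},
}
desired = {
    "prompt": "daily report",
    "schedule": {"kind": "interval", "minutes": 1440},
    "model": "sonnet",
}

# A prompt/schedule-only comparison would skip this update; _jobs_changed
# also compares the flattened model and provider, so it is detected.
assert _jobs_changed(cur, desired)
```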
```diff
-
-def _parse_schedule(schedule: str) -> Dict[str, Any]:
-    """Parse schedule string into structured format."""
-    try:
-        from cron.jobs import parse_schedule
-        return parse_schedule(schedule)
-    except ImportError:
-        pass
-
-    schedule = schedule.strip()
-    if schedule.startswith("every "):
-        dur = schedule[6:].strip()
-        unit = dur[-1]
-        val = int(dur[:-1])
-        minutes = val * {"m": 1, "h": 60, "d": 1440}.get(unit, 1)
-        return {"kind": "interval", "minutes": minutes, "display": f"every {minutes}m"}
-    return {"kind": "cron", "expr": schedule, "display": schedule}
```
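When `cron.jobs.parse_schedule` is unavailable and the fallback branch runs, the removed parser maps `every <N><unit>` strings to minute intervals and passes anything else through as a raw cron expression. Illustrative values:

```python
# Assumes the `from cron.jobs import parse_schedule` import fails, so the
# fallback branch of _parse_schedule (above) is exercised.
assert _parse_schedule("every 30m") == {
    "kind": "interval", "minutes": 30, "display": "every 30m"
}
assert _parse_schedule("every 2h") == {
    "kind": "interval", "minutes": 120, "display": "every 120m"
}
# Non-"every" strings are treated as cron expressions.
assert _parse_schedule("0 6 * * *") == {
    "kind": "cron", "expr": "0 6 * * *", "display": "0 6 * * *"
}
```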
```diff
-
-def deploy_from_yaml(
-    config_path: Path,
-    jobs_file: Path,
-    dry_run: bool = False,
-) -> int:
-    """Sync jobs from YAML config into jobs.json."""
-    if not HAS_YAML:
-        print("Error: PyYAML required for --deploy. pip install pyyaml", file=sys.stderr)
-        return 1
-    if not config_path.exists():
-        print(f"Error: Config not found: {config_path}", file=sys.stderr)
-        return 1
-
-    with open(config_path, "r", encoding="utf-8") as f:
-        yaml_jobs = (yaml.safe_load(f) or {}).get("jobs", [])
-
-    if jobs_file.exists():
-        with open(jobs_file, "r", encoding="utf-8") as f:
-            data = json.load(f)
-    else:
-        data = {"jobs": [], "updated_at": None}
-
-    existing: List[Dict[str, Any]] = data.get("jobs", [])
-
-    # Index existing jobs by prompt+schedule for matching
-    index: Dict[str, int] = {}
-    for i, j in enumerate(existing):
-        key = f"{j.get('prompt', '')}||{json.dumps(j.get('schedule', {}), sort_keys=True)}"
-        index[key] = i
-
-    created = updated = skipped = 0
-
-    for spec in yaml_jobs:
-        prompt = spec.get("prompt", "")
-        schedule_str = spec.get("schedule", "")
-        name = spec.get("name", "")
-        model = spec.get("model")
-        provider = spec.get("provider")
-        skills = spec.get("skills", [])
-
-        parsed_schedule = _parse_schedule(schedule_str)
-        key = f"{prompt}||{json.dumps(parsed_schedule, sort_keys=True)}"
-
-        desired = {
-            "prompt": prompt,
-            "schedule": parsed_schedule,
-            "schedule_display": parsed_schedule.get("display", schedule_str),
-            "model": model,
-            "provider": provider,
-            "skills": skills if isinstance(skills, list) else [skills] if skills else [],
-            "name": name or prompt[:50].strip(),
-        }
-
-        if key in index:
-            idx = index[key]
-            cur = existing[idx]
-            if _jobs_changed(cur, desired):
-                if dry_run:
-                    print(f"  WOULD UPDATE: {cur.get('id', '?')} ({cur.get('name', '?')})")
-                    print(f"    model: {_flat_model(cur)!r} -> {model!r}")
-                    print(f"    provider: {_flat_provider(cur)!r} -> {provider!r}")
-                else:
-                    existing[idx].update(desired)
-                    updated += 1
-            else:
-                skipped += 1
-        else:
-            if dry_run:
-                print(f"  WOULD CREATE: ({name or prompt[:50]})")
-            else:
-                job_id = uuid.uuid4().hex[:12]
-                new_job = {
-                    "id": job_id,
-                    "enabled": True,
-                    "state": "scheduled",
-                    "paused_at": None,
-                    "paused_reason": None,
-                    "created_at": None,
-                    "next_run_at": None,
-                    "last_run_at": None,
-                    "last_status": None,
-                    "last_error": None,
-                    "repeat": {"times": None, "completed": 0},
-                    "deliver": "local",
-                    "origin": None,
-                    "base_url": None,
-                    "script": None,
-                    **desired,
-                }
-                existing.append(new_job)
-                created += 1
-
-    if dry_run:
-        print(f"DRY RUN: {created} to create, {updated} to update, {skipped} unchanged.")
-        return 0
-
-    data["jobs"] = existing
-    jobs_file.parent.mkdir(parents=True, exist_ok=True)
-    with open(jobs_file, "w", encoding="utf-8") as f:
-        json.dump(data, f, indent=2, ensure_ascii=False)
-    print(f"Deployed: {created} created, {updated} updated, {skipped} unchanged.")
-    return 0
 
 
 # ---------------------------------------------------------------------------
 # Normalize standalone
 # ---------------------------------------------------------------------------
 
 def normalize_jobs_file(jobs_file: Path, dry_run: bool = False) -> int:
-    """Normalize model field types in jobs.json."""
+    """
+    Normalize all jobs in a jobs.json file.
+
+    Returns the number of jobs that were modified.
+    """
     if not jobs_file.exists():
-        print(f"Error: {jobs_file}", file=sys.stderr)
+        print(f"Error: Jobs file not found: {jobs_file}", file=sys.stderr)
         return 1
 
-    with open(jobs_file, "r", encoding="utf-8") as f:
-        data = json.load(f)
-
+    try:
+        with open(jobs_file, 'r', encoding='utf-8') as f:
+            data = json.load(f)
+    except json.JSONDecodeError as e:
+        print(f"Error: Invalid JSON in {jobs_file}: {e}", file=sys.stderr)
+        return 1
 
     jobs = data.get("jobs", [])
     if not jobs:
-        print("No jobs found.")
+        print("No jobs found in file.")
         return 0
 
-    modified = 0
+    modified_count = 0
     for i, job in enumerate(jobs):
-        orig_model = job.get("model")
-        orig_provider = job.get("provider")
-        normed = normalize_job(job)
-        if normed.get("model") != orig_model or normed.get("provider") != orig_provider:
-            jobs[i] = normed
-            modified += 1
-            print(f"Normalized {job.get('id', '?')} ({job.get('name', '?')}):")
-            print(f"  model: {orig_model!r} -> {normed.get('model')!r}")
-            print(f"  provider: {orig_provider!r} -> {normed.get('provider')!r}")
-
-    if modified == 0:
-        print("All jobs already consistent.")
+        original_model = job.get("model")
+        original_provider = job.get("provider")
+
+        normalized_job = normalize_job(job)
+
+        # Check if anything changed
+        if (normalized_job.get("model") != original_model or
+                normalized_job.get("provider") != original_provider):
+            jobs[i] = normalized_job
+            modified_count += 1
+
+            job_id = job.get("id", "?")
+            job_name = job.get("name", "(unnamed)")
+            print(f"Normalized job {job_id} ({job_name}):")
+            print(f"  model: {original_model!r} -> {normalized_job.get('model')!r}")
+            print(f"  provider: {original_provider!r} -> {normalized_job.get('provider')!r}")
+
+    if modified_count == 0:
+        print("All jobs already have consistent model field types.")
         return 0
 
     if dry_run:
-        print(f"DRY RUN: Would normalize {modified} jobs.")
+        print(f"DRY RUN: Would normalize {modified_count} jobs.")
        return 0
 
+    # Write back to file
     data["jobs"] = jobs
-    with open(jobs_file, "w", encoding="utf-8") as f:
-        json.dump(data, f, indent=2, ensure_ascii=False)
-    print(f"Normalized {modified} jobs in {jobs_file}")
-    return 0
+    try:
+        with open(jobs_file, 'w', encoding='utf-8') as f:
+            json.dump(data, f, indent=2, ensure_ascii=False)
+        print(f"Normalized {modified_count} jobs in {jobs_file}")
+        return 0
+    except Exception as e:
+        print(f"Error writing to {jobs_file}: {e}", file=sys.stderr)
+        return 1
 
 
 # ---------------------------------------------------------------------------
 # CLI
 # ---------------------------------------------------------------------------
 
 def main():
-    parser = argparse.ArgumentParser(description="Deploy and normalize cron jobs.")
-    group = parser.add_mutually_exclusive_group(required=True)
-    group.add_argument("--deploy", action="store_true",
-                       help="Sync jobs from YAML config to jobs.json")
-    group.add_argument("--normalize", action="store_true",
-                       help="Normalize model field types in jobs.json")
-
-    parser.add_argument("--config", type=Path,
-                        default=Path.home() / ".hermes" / "cron-jobs.yaml",
-                        help="Path to cron-jobs.yaml")
-    parser.add_argument("--jobs-file", type=Path,
-                        default=Path.home() / ".hermes" / "cron" / "jobs.json",
-                        help="Path to jobs.json")
-    parser.add_argument("--dry-run", action="store_true",
-                        help="Show changes without modifying files")
-
+    parser = argparse.ArgumentParser(
+        description="Normalize cron job schemas for consistent model field types."
+    )
+    parser.add_argument(
+        "--dry-run",
+        action="store_true",
+        help="Show what would be changed without modifying the file."
+    )
+    parser.add_argument(
+        "--jobs-file",
+        type=Path,
+        default=Path.home() / ".hermes" / "cron" / "jobs.json",
+        help="Path to jobs.json file (default: ~/.hermes/cron/jobs.json)"
+    )
+
     args = parser.parse_args()
 
     if args.dry_run:
-        print("DRY RUN MODE.")
+        print("DRY RUN MODE — no changes will be made.")
         print()
 
-    if args.deploy:
-        return deploy_from_yaml(args.config, args.jobs_file, args.dry_run)
-    else:
-        return normalize_jobs_file(args.jobs_file, args.dry_run)
+    return normalize_jobs_file(args.jobs_file, args.dry_run)
 
 
 if __name__ == "__main__":
     sys.exit(main())
```