Files
timmy-home/scripts/evennia/launch_timmy_evennia_local.sh
2026-03-28 13:33:26 -04:00

71 lines
1.9 KiB
Bash
Executable File

#!/usr/bin/env bash
# Launch a local Hermes chat session wired to the Evennia MCP server.
# Seeds an isolated HERMES_HOME from the primary Hermes install, links the
# shared skins directory, rewrites the local config (heredoc below), then
# execs `hermes chat`.
#
# Env vars:
#   HERMES_HOME - isolated config dir (default: ~/.hermes-local)
#   REPO_ROOT   - checkout containing scripts/evennia (default: ~/.timmy)
set -euo pipefail

export HERMES_HOME="${HERMES_HOME:-$HOME/.hermes-local}"
# Exported so the embedded Python config rewriter sees the same repo root
# (it reads REPO_ROOT from the environment).
export REPO_ROOT="${REPO_ROOT:-$HOME/.timmy}"
SRC_CFG="$HOME/.hermes/config.yaml"
DST_CFG="$HERMES_HOME/config.yaml"

mkdir -p "$HERMES_HOME"

# Seed the local config from the primary install on first run; fail with a
# clear message instead of a bare `cp` error under set -e.
if [ ! -f "$DST_CFG" ]; then
  if [ ! -f "$SRC_CFG" ]; then
    printf 'error: base Hermes config not found: %s\n' "$SRC_CFG" >&2
    exit 1
  fi
  cp -- "$SRC_CFG" "$DST_CFG"
fi

# Share skins with the primary install. A dangling symlink fails the -e test
# yet still makes `ln -s` error with "file exists", so clear it first.
if [ -L "$HERMES_HOME/skins" ] && [ ! -e "$HERMES_HOME/skins" ]; then
  rm -- "$HERMES_HOME/skins"
fi
if [ ! -e "$HERMES_HOME/skins" ]; then
  ln -s -- "$HOME/.hermes/skins" "$HERMES_HOME/skins"
fi
# Rewrite the local config in place: point the default model at the local
# llama.cpp server, register that server as a custom provider, and add the
# Evennia MCP server. Requires PyYAML to be importable by python3.
python3 - <<'PY'
import os
from pathlib import Path

import yaml

MODEL = 'NousResearch_Hermes-4-14B-Q4_K_M.gguf'
BASE_URL = 'http://localhost:8081/v1'
PROVIDER_NAME = 'Local llama.cpp'

# Honor the HERMES_HOME the launcher exported; the previous version
# hardcoded ~/.hermes-local and silently ignored a user override.
hermes_home = Path(
    os.environ.get('HERMES_HOME', str(Path.home() / '.hermes-local'))
).expanduser()
cfg_path = hermes_home / 'config.yaml'
data = yaml.safe_load(cfg_path.read_text()) or {}

# model: force the local llama.cpp endpoint. A scalar 'model' value is
# promoted to a dict first so the original default is kept as a string.
model = data.get('model')
if not isinstance(model, dict):
    model = {'default': str(model)} if model else {}
data['model'] = model
model['default'] = MODEL
model['provider'] = 'custom'
model['base_url'] = BASE_URL
model['context_length'] = 65536

# custom_providers: refresh the existing local entry in place, or prepend
# a new one so it takes precedence.
providers = data.get('custom_providers')
if not isinstance(providers, list):
    providers = []
data['custom_providers'] = providers
for entry in providers:
    if isinstance(entry, dict) and entry.get('name') == PROVIDER_NAME:
        entry['base_url'] = BASE_URL
        entry['api_key'] = 'none'
        entry['model'] = MODEL
        break
else:
    providers.insert(0, {
        'name': PROVIDER_NAME,
        'base_url': BASE_URL,
        'api_key': 'none',
        'model': MODEL,
    })

# mcp_servers: register (or refresh) only the 'evennia' entry. The previous
# version replaced the whole mapping, clobbering any other MCP servers the
# user had configured.
repo_root = Path(
    os.environ.get('REPO_ROOT', str(Path.home() / '.timmy'))
).expanduser()
script_path = repo_root / 'scripts' / 'evennia' / 'evennia_mcp_server.py'
servers = data.get('mcp_servers')
if not isinstance(servers, dict):
    servers = {}
data['mcp_servers'] = servers
servers['evennia'] = {
    'command': 'python3',
    'args': [str(script_path)],
    'env': {},
    'timeout': 30,
}

cfg_path.write_text(yaml.safe_dump(data, sort_keys=False, allow_unicode=True))
PY
# Replace this shell with the Hermes CLI, forwarding all launcher arguments.
exec hermes chat "$@"