feat: mount skill credential files + fix env passthrough for remote backends (#3671)
Two related fixes for remote terminal backends (Modal/Docker):

1. NEW: Credential file mounting system. Skills declare `required_credential_files` in frontmatter. Files are mounted into Docker (read-only bind mounts) and Modal (mounts at creation, plus sync via exec on each command for mid-session changes). The Google Workspace skill is updated with the new field.

2. FIX: The Docker backend now includes env_passthrough vars. Skills that declare `required_environment_variables` (e.g. Notion with NOTION_API_KEY) register vars in the env_passthrough system. The local backend checked this, but Docker's forward_env was a separate, disconnected list. Now Docker exec merges both sources, so skill-declared env vars are forwarded into containers automatically.

This fixes the reported issue where NOTION_API_KEY in ~/.hermes/.env wasn't reaching the Docker container despite being registered via the Notion skill's prerequisites. Closes #3665
This commit is contained in:
@@ -4,6 +4,11 @@ description: Gmail, Calendar, Drive, Contacts, Sheets, and Docs integration via
|
||||
version: 1.0.0
|
||||
author: Nous Research
|
||||
license: MIT
|
||||
required_credential_files:
|
||||
- path: google_token.json
|
||||
description: Google OAuth2 token (created by setup script)
|
||||
- path: google_client_secret.json
|
||||
description: Google OAuth2 client credentials (downloaded from Google Cloud Console)
|
||||
metadata:
|
||||
hermes:
|
||||
tags: [Google, Gmail, Calendar, Drive, Sheets, Docs, Contacts, Email, OAuth]
|
||||
|
||||
158
tests/tools/test_credential_files.py
Normal file
158
tests/tools/test_credential_files.py
Normal file
@@ -0,0 +1,158 @@
|
||||
"""Tests for credential file passthrough registry (tools/credential_files.py)."""
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
from tools.credential_files import (
|
||||
clear_credential_files,
|
||||
get_credential_file_mounts,
|
||||
register_credential_file,
|
||||
register_credential_files,
|
||||
reset_config_cache,
|
||||
)
|
||||
|
||||
|
||||
@pytest.fixture(autouse=True)
def _clean_registry():
    """Start and finish every test with an empty registry and a cold config cache."""
    reset_config_cache()
    clear_credential_files()
    yield
    reset_config_cache()
    clear_credential_files()
|
||||
|
||||
|
||||
class TestRegisterCredentialFile:
    """Single-entry behavior of register_credential_file()."""

    def test_registers_existing_file(self, tmp_path, monkeypatch):
        monkeypatch.setenv("HERMES_HOME", str(tmp_path))
        (tmp_path / "token.json").write_text('{"token": "abc"}')

        assert register_credential_file("token.json") is True

        mounts = get_credential_file_mounts()
        assert len(mounts) == 1
        entry = mounts[0]
        assert entry["host_path"] == str(tmp_path / "token.json")
        assert entry["container_path"] == "/root/.hermes/token.json"

    def test_skips_missing_file(self, tmp_path, monkeypatch):
        monkeypatch.setenv("HERMES_HOME", str(tmp_path))

        # Nothing was written into HERMES_HOME, so registration must fail.
        assert register_credential_file("nonexistent.json") is False
        assert get_credential_file_mounts() == []

    def test_custom_container_base(self, tmp_path, monkeypatch):
        monkeypatch.setenv("HERMES_HOME", str(tmp_path))
        (tmp_path / "cred.json").write_text("{}")

        register_credential_file("cred.json", container_base="/home/user/.hermes")

        mounts = get_credential_file_mounts()
        assert mounts[0]["container_path"] == "/home/user/.hermes/cred.json"

    def test_deduplicates_by_container_path(self, tmp_path, monkeypatch):
        monkeypatch.setenv("HERMES_HOME", str(tmp_path))
        (tmp_path / "token.json").write_text("{}")

        # Registering the same relative path twice must not create two mounts.
        for _ in range(2):
            register_credential_file("token.json")

        assert len(get_credential_file_mounts()) == 1
|
||||
|
||||
|
||||
class TestRegisterCredentialFiles:
    """Frontmatter-style batch registration via register_credential_files()."""

    def test_string_entries(self, tmp_path, monkeypatch):
        monkeypatch.setenv("HERMES_HOME", str(tmp_path))
        for name in ("a.json", "b.json"):
            (tmp_path / name).write_text("{}")

        assert register_credential_files(["a.json", "b.json"]) == []
        assert len(get_credential_file_mounts()) == 2

    def test_dict_entries(self, tmp_path, monkeypatch):
        monkeypatch.setenv("HERMES_HOME", str(tmp_path))
        (tmp_path / "token.json").write_text("{}")

        entries = [{"path": "token.json", "description": "OAuth token"}]
        assert register_credential_files(entries) == []
        assert len(get_credential_file_mounts()) == 1

    def test_returns_missing_files(self, tmp_path, monkeypatch):
        monkeypatch.setenv("HERMES_HOME", str(tmp_path))
        (tmp_path / "exists.json").write_text("{}")

        unresolved = register_credential_files(
            ["exists.json", "missing.json", {"path": "also_missing.json"}]
        )

        # Only the files absent from the host are reported back.
        assert unresolved == ["missing.json", "also_missing.json"]
        assert len(get_credential_file_mounts()) == 1

    def test_empty_list(self, tmp_path, monkeypatch):
        monkeypatch.setenv("HERMES_HOME", str(tmp_path))
        assert register_credential_files([]) == []
|
||||
|
||||
|
||||
class TestConfigCredentialFiles:
    """Credential files declared via ``terminal.credential_files`` in config.yaml."""

    @staticmethod
    def _write_config(home, name):
        # Minimal config.yaml declaring a single credential file entry.
        (home / "config.yaml").write_text(
            f"terminal:\n credential_files:\n - {name}\n"
        )

    def test_loads_from_config(self, tmp_path, monkeypatch):
        monkeypatch.setenv("HERMES_HOME", str(tmp_path))
        (tmp_path / "oauth.json").write_text("{}")
        self._write_config(tmp_path, "oauth.json")

        mounts = get_credential_file_mounts()

        assert len(mounts) == 1
        assert mounts[0]["host_path"] == str(tmp_path / "oauth.json")

    def test_config_skips_missing_files(self, tmp_path, monkeypatch):
        monkeypatch.setenv("HERMES_HOME", str(tmp_path))
        self._write_config(tmp_path, "nonexistent.json")

        # The listed file does not exist on disk, so nothing is mounted.
        assert get_credential_file_mounts() == []

    def test_combines_skill_and_config(self, tmp_path, monkeypatch):
        monkeypatch.setenv("HERMES_HOME", str(tmp_path))
        (tmp_path / "skill_token.json").write_text("{}")
        (tmp_path / "config_token.json").write_text("{}")
        self._write_config(tmp_path, "config_token.json")

        register_credential_file("skill_token.json")
        mounts = get_credential_file_mounts()

        assert len(mounts) == 2
        container_paths = {m["container_path"] for m in mounts}
        assert "/root/.hermes/skill_token.json" in container_paths
        assert "/root/.hermes/config_token.json" in container_paths
|
||||
|
||||
|
||||
class TestGetMountsRechecksExistence:
    """get_credential_file_mounts() must drop files deleted after registration."""

    def test_removed_file_excluded_from_mounts(self, tmp_path, monkeypatch):
        monkeypatch.setenv("HERMES_HOME", str(tmp_path))
        cred = tmp_path / "token.json"
        cred.write_text("{}")

        register_credential_file("token.json")
        assert len(get_credential_file_mounts()) == 1

        # Simulate the credential disappearing mid-session.
        cred.unlink()
        assert get_credential_file_mounts() == []
|
||||
163
tools/credential_files.py
Normal file
163
tools/credential_files.py
Normal file
@@ -0,0 +1,163 @@
|
||||
"""Credential file passthrough registry for remote terminal backends.
|
||||
|
||||
Skills that declare ``required_credential_files`` in their frontmatter need
|
||||
those files available inside sandboxed execution environments (Modal, Docker).
|
||||
By default remote backends create bare containers with no host files.
|
||||
|
||||
This module provides a session-scoped registry so skill-declared credential
|
||||
files (and user-configured overrides) are mounted into remote sandboxes.
|
||||
|
||||
Two sources feed the registry:
|
||||
|
||||
1. **Skill declarations** — when a skill is loaded via ``skill_view``, its
|
||||
``required_credential_files`` entries are registered here if the files
|
||||
exist on the host.
|
||||
2. **User config** — ``terminal.credential_files`` in config.yaml lets users
|
||||
explicitly list additional files to mount.
|
||||
|
||||
Remote backends (``tools/environments/modal.py``, ``docker.py``) call
|
||||
:func:`get_credential_file_mounts` at sandbox creation time.
|
||||
|
||||
Each registered entry is a dict::
|
||||
|
||||
{
|
||||
"host_path": "/home/user/.hermes/google_token.json",
|
||||
"container_path": "/root/.hermes/google_token.json",
|
||||
}
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import logging
|
||||
import os
|
||||
from pathlib import Path
|
||||
from typing import Dict, List
|
||||
|
||||
logger = logging.getLogger(__name__)
|
||||
|
||||
# Session-scoped list of credential files to mount.
|
||||
# Key: container_path (deduplicated), Value: host_path
|
||||
_registered_files: Dict[str, str] = {}
|
||||
|
||||
# Cache for config-based file list (loaded once per process).
|
||||
_config_files: List[Dict[str, str]] | None = None
|
||||
|
||||
|
||||
def _resolve_hermes_home() -> Path:
    """Return the Hermes home directory: ``$HERMES_HOME`` if set, else ``~/.hermes``."""
    override = os.environ.get("HERMES_HOME")
    if override is not None:
        return Path(override)
    return Path.home() / ".hermes"
|
||||
|
||||
|
||||
def register_credential_file(
    relative_path: str,
    container_base: str = "/root/.hermes",
) -> bool:
    """Register a credential file for mounting into remote sandboxes.

    *relative_path* is relative to ``HERMES_HOME`` (e.g. ``google_token.json``).
    Paths that escape ``HERMES_HOME`` (absolute paths or ``..`` components)
    are rejected: this value originates in skill frontmatter, which is
    third-party input and must not be able to mount arbitrary host files.

    Returns True if the file exists on the host and was registered.
    """
    hermes_home = _resolve_hermes_home()
    host_path = hermes_home / relative_path

    # Security: ensure the resolved target stays inside HERMES_HOME so
    # entries like "../.ssh/id_rsa" or "/etc/passwd" cannot be mounted
    # into sandboxes via a malicious skill's frontmatter.
    try:
        host_path.resolve().relative_to(hermes_home.resolve())
    except (ValueError, OSError, RuntimeError):
        logger.warning(
            "credential_files: rejecting %s (escapes HERMES_HOME)", relative_path
        )
        return False

    if not host_path.is_file():
        logger.debug("credential_files: skipping %s (not found)", host_path)
        return False

    container_path = f"{container_base.rstrip('/')}/{relative_path}"
    _registered_files[container_path] = str(host_path)
    logger.debug("credential_files: registered %s -> %s", host_path, container_path)
    return True
|
||||
|
||||
|
||||
def register_credential_files(
    entries: list,
    container_base: str = "/root/.hermes",
) -> List[str]:
    """Register multiple credential files from skill frontmatter entries.

    Each entry is either a string (a relative path) or a dict carrying a
    ``path`` key; anything else is silently ignored. Returns the relative
    paths that were NOT found on the host (i.e. missing files).
    """
    not_found: List[str] = []
    for raw in entries:
        if isinstance(raw, str):
            candidate = raw.strip()
        elif isinstance(raw, dict):
            candidate = (raw.get("path") or "").strip()
        else:
            # Unrecognized entry shape — skip rather than fail the skill load.
            continue
        if candidate and not register_credential_file(candidate, container_base):
            not_found.append(candidate)
    return not_found
|
||||
|
||||
|
||||
def _load_config_files() -> List[Dict[str, str]]:
    """Load ``terminal.credential_files`` from config.yaml (cached per process)."""
    global _config_files
    if _config_files is not None:
        return _config_files

    entries: List[Dict[str, str]] = []
    try:
        home = _resolve_hermes_home()
        cfg_path = home / "config.yaml"
        if cfg_path.exists():
            import yaml

            with open(cfg_path) as fh:
                data = yaml.safe_load(fh) or {}
            listed = data.get("terminal", {}).get("credential_files")
            for raw in listed if isinstance(listed, list) else []:
                if not isinstance(raw, str) or not raw.strip():
                    continue
                name = raw.strip()
                candidate = home / name
                # Only files that actually exist on the host are mountable.
                if candidate.is_file():
                    entries.append({
                        "host_path": str(candidate),
                        "container_path": f"/root/.hermes/{name}",
                    })
    except Exception as exc:
        # Best-effort: a malformed config must never break sandbox creation.
        logger.debug("Could not read terminal.credential_files from config: %s", exc)

    _config_files = entries
    return _config_files
|
||||
|
||||
|
||||
def get_credential_file_mounts() -> List[Dict[str, str]]:
    """Return all credential files that should be mounted into remote sandboxes.

    Combines skill-registered files with user-configured ones; on a
    container-path collision the skill entry wins. Existence on the host is
    re-checked at call time, since a file may have been deleted after it was
    registered. Each item has ``host_path`` and ``container_path`` keys.
    """
    combined: Dict[str, str] = {
        cpath: hpath
        for cpath, hpath in _registered_files.items()
        if Path(hpath).is_file()
    }

    for item in _load_config_files():
        cpath = item["container_path"]
        if cpath not in combined and Path(item["host_path"]).is_file():
            combined[cpath] = item["host_path"]

    return [
        {"host_path": hpath, "container_path": cpath}
        for cpath, hpath in combined.items()
    ]
|
||||
|
||||
|
||||
def clear_credential_files() -> None:
    """Drop every skill-registered credential file (e.g. on session reset).

    Config-declared files are unaffected; see :func:`reset_config_cache`.
    """
    _registered_files.clear()
|
||||
|
||||
|
||||
def reset_config_cache() -> None:
    """Invalidate the cached config list so the next access re-reads config.yaml.

    Intended for tests that rewrite config.yaml between cases.
    """
    global _config_files
    _config_files = None
|
||||
@@ -312,6 +312,24 @@ class DockerEnvironment(BaseEnvironment):
|
||||
elif workspace_explicitly_mounted:
|
||||
logger.debug("Skipping docker cwd mount: /workspace already mounted by user config")
|
||||
|
||||
# Mount credential files (OAuth tokens, etc.) declared by skills.
|
||||
# Read-only so the container can authenticate but not modify host creds.
|
||||
try:
|
||||
from tools.credential_files import get_credential_file_mounts
|
||||
|
||||
for mount_entry in get_credential_file_mounts():
|
||||
volume_args.extend([
|
||||
"-v",
|
||||
f"{mount_entry['host_path']}:{mount_entry['container_path']}:ro",
|
||||
])
|
||||
logger.info(
|
||||
"Docker: mounting credential %s -> %s",
|
||||
mount_entry["host_path"],
|
||||
mount_entry["container_path"],
|
||||
)
|
||||
except Exception as e:
|
||||
logger.debug("Docker: could not load credential file mounts: %s", e)
|
||||
|
||||
logger.info(f"Docker volume_args: {volume_args}")
|
||||
all_run_args = list(_SECURITY_ARGS) + writable_args + resource_args + volume_args
|
||||
logger.info(f"Docker run_args: {all_run_args}")
|
||||
@@ -406,8 +424,17 @@ class DockerEnvironment(BaseEnvironment):
|
||||
if effective_stdin is not None:
|
||||
cmd.append("-i")
|
||||
cmd.extend(["-w", work_dir])
|
||||
hermes_env = _load_hermes_env_vars() if self._forward_env else {}
|
||||
for key in self._forward_env:
|
||||
# Combine explicit docker_forward_env with skill-declared env_passthrough
|
||||
# vars so skills that declare required_environment_variables (e.g. Notion)
|
||||
# have their keys forwarded into the container automatically.
|
||||
forward_keys = set(self._forward_env)
|
||||
try:
|
||||
from tools.env_passthrough import get_all_passthrough
|
||||
forward_keys |= get_all_passthrough()
|
||||
except Exception:
|
||||
pass
|
||||
hermes_env = _load_hermes_env_vars() if forward_keys else {}
|
||||
for key in sorted(forward_keys):
|
||||
value = os.getenv(key)
|
||||
if value is None:
|
||||
value = hermes_env.get(key)
|
||||
|
||||
@@ -137,6 +137,28 @@ class ModalEnvironment(BaseEnvironment):
|
||||
],
|
||||
)
|
||||
|
||||
# Mount credential files (OAuth tokens, etc.) declared by skills.
|
||||
# These are read-only copies so the sandbox can authenticate with
|
||||
# external services but can't modify the host's credentials.
|
||||
cred_mounts = []
|
||||
try:
|
||||
from tools.credential_files import get_credential_file_mounts
|
||||
|
||||
for mount_entry in get_credential_file_mounts():
|
||||
cred_mounts.append(
|
||||
_modal.Mount.from_local_file(
|
||||
mount_entry["host_path"],
|
||||
remote_path=mount_entry["container_path"],
|
||||
)
|
||||
)
|
||||
logger.info(
|
||||
"Modal: mounting credential %s -> %s",
|
||||
mount_entry["host_path"],
|
||||
mount_entry["container_path"],
|
||||
)
|
||||
except Exception as e:
|
||||
logger.debug("Modal: could not load credential file mounts: %s", e)
|
||||
|
||||
# Start the async worker thread and create sandbox on it
|
||||
# so all gRPC channels are bound to the worker's event loop.
|
||||
self._worker.start()
|
||||
@@ -145,23 +167,90 @@ class ModalEnvironment(BaseEnvironment):
|
||||
app = await _modal.App.lookup.aio(
|
||||
"hermes-agent", create_if_missing=True
|
||||
)
|
||||
create_kwargs = dict(sandbox_kwargs)
|
||||
if cred_mounts:
|
||||
existing_mounts = list(create_kwargs.pop("mounts", []))
|
||||
existing_mounts.extend(cred_mounts)
|
||||
create_kwargs["mounts"] = existing_mounts
|
||||
sandbox = await _modal.Sandbox.create.aio(
|
||||
"sleep", "infinity",
|
||||
image=effective_image,
|
||||
app=app,
|
||||
timeout=int(sandbox_kwargs.pop("timeout", 3600)),
|
||||
**sandbox_kwargs,
|
||||
timeout=int(create_kwargs.pop("timeout", 3600)),
|
||||
**create_kwargs,
|
||||
)
|
||||
return app, sandbox
|
||||
|
||||
self._app, self._sandbox = self._worker.run_coroutine(
|
||||
_create_sandbox(), timeout=300
|
||||
)
|
||||
# Track synced credential files to avoid redundant pushes.
|
||||
# Key: container_path, Value: (mtime, size) of last synced version.
|
||||
self._synced_creds: Dict[str, tuple] = {}
|
||||
logger.info("Modal: sandbox created (task=%s)", self._task_id)
|
||||
|
||||
def _sync_credential_files(self) -> None:
    """Push credential files into the running sandbox.

    Mounts are set at sandbox creation, but credentials may be created
    later (e.g. OAuth setup mid-session). This writes the current file
    content into the sandbox via exec(), so new/updated credentials are
    available without recreating the sandbox.

    Best-effort: any failure is logged at debug level and never
    interrupts command execution.
    """
    # Hoisted out of the per-file loop (was re-imported on every iteration).
    import base64

    try:
        from tools.credential_files import get_credential_file_mounts

        mounts = get_credential_file_mounts()
        if not mounts:
            return

        for entry in mounts:
            host_path = entry["host_path"]
            container_path = entry["container_path"]
            hp = Path(host_path)
            try:
                stat = hp.stat()
                file_key = (stat.st_mtime, stat.st_size)
            except OSError:
                continue

            # Skip if already synced with the same mtime+size.
            if self._synced_creds.get(container_path) == file_key:
                continue

            try:
                # Read raw bytes: credential files are not guaranteed to be
                # UTF-8 text (the old read_text() silently skipped them).
                payload = hp.read_bytes()
            except OSError:
                continue

            # Write via base64 to avoid shell escaping issues with arbitrary content.
            b64 = base64.b64encode(payload).decode("ascii")
            container_dir = str(Path(container_path).parent)
            cmd = (
                f"mkdir -p {shlex.quote(container_dir)} && "
                f"echo {shlex.quote(b64)} | base64 -d > {shlex.quote(container_path)}"
            )

            # Bind `cmd` as a default argument so the coroutine is
            # self-contained (avoids late-binding surprises), even though it
            # is executed synchronously right below.
            async def _write(cmd=cmd):
                proc = await self._sandbox.exec.aio("bash", "-c", cmd)
                await proc.wait.aio()

            try:
                self._worker.run_coroutine(_write(), timeout=15)
            except Exception as e:
                # One bad file must not prevent syncing the remaining ones.
                logger.debug(
                    "Modal: credential sync failed for %s: %s", container_path, e
                )
                continue

            self._synced_creds[container_path] = file_key
            logger.debug("Modal: synced credential %s -> %s", host_path, container_path)
    except Exception as e:
        logger.debug("Modal: credential file sync failed: %s", e)
|
||||
|
||||
def execute(self, command: str, cwd: str = "", *,
|
||||
timeout: int | None = None,
|
||||
stdin_data: str | None = None) -> dict:
|
||||
# Sync credential files before each command so mid-session
|
||||
# OAuth setups are picked up without requiring a restart.
|
||||
self._sync_credential_files()
|
||||
|
||||
if stdin_data is not None:
|
||||
marker = f"HERMES_EOF_{uuid.uuid4().hex[:8]}"
|
||||
while marker in stdin_data:
|
||||
|
||||
@@ -1106,6 +1106,27 @@ def skill_view(name: str, file_path: str = None, task_id: str = None) -> str:
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
# Register credential files for mounting into remote sandboxes
|
||||
# (Modal, Docker). Files that exist on the host are registered;
|
||||
# missing ones are added to the setup_needed indicators.
|
||||
required_cred_files_raw = frontmatter.get("required_credential_files", [])
|
||||
if not isinstance(required_cred_files_raw, list):
|
||||
required_cred_files_raw = []
|
||||
missing_cred_files: list = []
|
||||
if required_cred_files_raw:
|
||||
try:
|
||||
from tools.credential_files import register_credential_files
|
||||
|
||||
missing_cred_files = register_credential_files(required_cred_files_raw)
|
||||
if missing_cred_files:
|
||||
setup_needed = True
|
||||
except Exception:
|
||||
logger.debug(
|
||||
"Could not register credential files for skill %s",
|
||||
skill_name,
|
||||
exc_info=True,
|
||||
)
|
||||
|
||||
result = {
|
||||
"success": True,
|
||||
"name": skill_name,
|
||||
@@ -1121,6 +1142,7 @@ def skill_view(name: str, file_path: str = None, task_id: str = None) -> str:
|
||||
"required_environment_variables": required_env_vars,
|
||||
"required_commands": [],
|
||||
"missing_required_environment_variables": remaining_missing_required_envs,
|
||||
"missing_credential_files": missing_cred_files,
|
||||
"missing_required_commands": [],
|
||||
"setup_needed": setup_needed,
|
||||
"setup_skipped": capture_result["setup_skipped"],
|
||||
@@ -1139,6 +1161,8 @@ def skill_view(name: str, file_path: str = None, task_id: str = None) -> str:
|
||||
if setup_needed:
|
||||
missing_items = [
|
||||
f"env ${env_name}" for env_name in remaining_missing_required_envs
|
||||
] + [
|
||||
f"file {path}" for path in missing_cred_files
|
||||
]
|
||||
setup_note = _build_setup_note(
|
||||
SkillReadinessStatus.SETUP_NEEDED,
|
||||
|
||||
Reference in New Issue
Block a user