2026-02-21 22:31:43 -08:00
|
|
|
"""SSH remote execution environment with ControlMaster connection persistence."""
|
|
|
|
|
|
|
|
|
|
import logging
|
2026-04-06 13:16:25 -07:00
|
|
|
import shlex
|
2026-03-15 21:09:07 -07:00
|
|
|
import shutil
|
2026-02-21 22:31:43 -08:00
|
|
|
import subprocess
|
|
|
|
|
import tempfile
|
|
|
|
|
from pathlib import Path
|
|
|
|
|
|
|
|
|
|
from tools.environments.base import BaseEnvironment
|
|
|
|
|
|
|
|
|
|
# Module-level logger, named after this module per stdlib logging convention.
logger = logging.getLogger(__name__)
|
|
|
|
|
|
|
|
|
|
|
2026-03-15 21:09:07 -07:00
|
|
|
def _ensure_ssh_available() -> None:
|
|
|
|
|
"""Fail fast with a clear error when the SSH client is unavailable."""
|
|
|
|
|
if not shutil.which("ssh"):
|
|
|
|
|
raise RuntimeError(
|
|
|
|
|
"SSH is not installed or not in PATH. Install OpenSSH client: apt install openssh-client"
|
|
|
|
|
)
|
|
|
|
|
|
|
|
|
|
|
2026-04-02 13:45:53 +05:30
|
|
|
class SSHEnvironment(BaseEnvironment):
    """Run commands on a remote machine over SSH.

    Uses SSH ControlMaster for connection persistence so subsequent
    commands are fast. Security benefit: the agent cannot modify its
    own code since execution happens on a separate machine.

    Foreground commands are interruptible: the local ssh process is killed
    and a remote kill is attempted over the ControlMaster socket.

    Uses the unified spawn-per-call model:

    - bash -l once at session start to capture env snapshot on the remote
    - bash -c for every subsequent command (fast, no shell init overhead)
    - CWD tracked via cwdfile written after each command on the remote host
    """

    def __init__(self, host: str, user: str, cwd: str = "~",
                 timeout: int = 60, port: int = 22, key_path: str = "",
                 **kwargs):
        """Connect to ``user@host`` and initialize the remote session.

        Args:
            host: Remote hostname or IP address.
            user: Remote username.
            cwd: Initial working directory on the remote host.
            timeout: Default per-command timeout in seconds (passed to base).
            port: SSH port; ``-p`` is only added to commands when not 22.
            key_path: Optional private-key file, passed to ssh via ``-i``.
            **kwargs: Accepted but unused here (allows config-driven
                construction with extra keys).

        Raises:
            RuntimeError: if the ssh client is unavailable, or the initial
                connection fails or times out.
        """
        super().__init__(cwd=cwd, timeout=timeout)
        self.host = host
        self.user = user
        self.port = port
        self.key_path = key_path

        # Per-target ControlMaster socket under the system temp dir; the
        # filename encodes user/host/port so distinct targets never collide.
        self.control_dir = Path(tempfile.gettempdir()) / "hermes-ssh"
        self.control_dir.mkdir(parents=True, exist_ok=True)
        self.control_socket = self.control_dir / f"{user}@{host}:{port}.sock"
        # Fail fast before any subprocess calls if ssh is missing.
        _ensure_ssh_available()
        self._establish_connection()
        # Remote $HOME must be known before syncing so non-root users get
        # files under $HOME/.hermes rather than /root/.hermes.
        self._remote_home = self._detect_remote_home()
        self._sync_skills_and_credentials()
        self.init_session()
|
2026-03-12 23:38:11 +05:30
|
|
|
|
2026-03-13 16:54:11 +05:30
|
|
|
def _build_ssh_command(self, extra_args: list | None = None) -> list:
|
2026-02-21 22:31:43 -08:00
|
|
|
cmd = ["ssh"]
|
|
|
|
|
cmd.extend(["-o", f"ControlPath={self.control_socket}"])
|
|
|
|
|
cmd.extend(["-o", "ControlMaster=auto"])
|
|
|
|
|
cmd.extend(["-o", "ControlPersist=300"])
|
|
|
|
|
cmd.extend(["-o", "BatchMode=yes"])
|
|
|
|
|
cmd.extend(["-o", "StrictHostKeyChecking=accept-new"])
|
|
|
|
|
cmd.extend(["-o", "ConnectTimeout=10"])
|
|
|
|
|
if self.port != 22:
|
|
|
|
|
cmd.extend(["-p", str(self.port)])
|
|
|
|
|
if self.key_path:
|
|
|
|
|
cmd.extend(["-i", self.key_path])
|
|
|
|
|
if extra_args:
|
|
|
|
|
cmd.extend(extra_args)
|
|
|
|
|
cmd.append(f"{self.user}@{self.host}")
|
|
|
|
|
return cmd
|
|
|
|
|
|
|
|
|
|
def _establish_connection(self):
|
|
|
|
|
cmd = self._build_ssh_command()
|
|
|
|
|
cmd.append("echo 'SSH connection established'")
|
|
|
|
|
try:
|
|
|
|
|
result = subprocess.run(cmd, capture_output=True, text=True, timeout=15)
|
|
|
|
|
if result.returncode != 0:
|
|
|
|
|
error_msg = result.stderr.strip() or result.stdout.strip()
|
|
|
|
|
raise RuntimeError(f"SSH connection failed: {error_msg}")
|
|
|
|
|
except subprocess.TimeoutExpired:
|
|
|
|
|
raise RuntimeError(f"SSH connection to {self.user}@{self.host} timed out")
|
|
|
|
|
|
feat: mount skills directory into all remote backends with live sync (#3890)
Skills with scripts/, templates/, and references/ subdirectories need
those files available inside sandboxed execution environments. Previously
the skills directory was missing entirely from remote backends.
Live sync — files stay current as credentials refresh and skills update:
- Docker/Singularity: bind mounts are inherently live (host changes
visible immediately)
- Modal: _sync_files() runs before each command with mtime+size caching,
pushing only changed credential and skill files (~13μs no-op overhead)
- SSH: rsync --safe-links before each command (naturally incremental)
- Daytona: _upload_if_changed() with mtime+size caching before each command
Security — symlink filtering:
- Docker/Singularity: sanitized temp copy when symlinks detected
- Modal/Daytona: iter_skills_files() skips symlinks
- SSH: rsync --safe-links skips symlinks pointing outside source tree
- Temp dir cleanup via atexit + reuse across calls
Non-root user support:
- SSH: detects remote home via echo $HOME, syncs to $HOME/.hermes/
- Daytona: detects sandbox home before sync, uploads to $HOME/.hermes/
- Docker/Modal/Singularity: run as root, /root/.hermes/ is correct
Also:
- credential_files.py: fix name/path key fallback in required_credential_files
- Singularity, SSH, Daytona: gained credential file support
- 14 tests covering symlink filtering, name/path fallback, iter_skills_files
2026-03-30 02:45:41 -07:00
|
|
|
def _detect_remote_home(self) -> str:
|
|
|
|
|
"""Detect the remote user's home directory."""
|
|
|
|
|
try:
|
|
|
|
|
cmd = self._build_ssh_command()
|
|
|
|
|
cmd.append("echo $HOME")
|
|
|
|
|
result = subprocess.run(cmd, capture_output=True, text=True, timeout=10)
|
|
|
|
|
home = result.stdout.strip()
|
|
|
|
|
if home and result.returncode == 0:
|
|
|
|
|
logger.debug("SSH: remote home = %s", home)
|
|
|
|
|
return home
|
|
|
|
|
except Exception:
|
|
|
|
|
pass
|
|
|
|
|
# Fallback: guess from username
|
|
|
|
|
if self.user == "root":
|
|
|
|
|
return "/root"
|
|
|
|
|
return f"/home/{self.user}"
|
|
|
|
|
|
|
|
|
|
def _sync_skills_and_credentials(self) -> None:
|
|
|
|
|
"""Rsync skills directory and credential files to the remote host."""
|
|
|
|
|
try:
|
|
|
|
|
container_base = f"{self._remote_home}/.hermes"
|
|
|
|
|
from tools.credential_files import get_credential_file_mounts, get_skills_directory_mount
|
|
|
|
|
|
|
|
|
|
rsync_base = ["rsync", "-az", "--timeout=30", "--safe-links"]
|
|
|
|
|
ssh_opts = f"ssh -o ControlPath={self.control_socket} -o ControlMaster=auto"
|
|
|
|
|
if self.port != 22:
|
|
|
|
|
ssh_opts += f" -p {self.port}"
|
|
|
|
|
if self.key_path:
|
|
|
|
|
ssh_opts += f" -i {self.key_path}"
|
|
|
|
|
rsync_base.extend(["-e", ssh_opts])
|
|
|
|
|
dest_prefix = f"{self.user}@{self.host}"
|
|
|
|
|
|
|
|
|
|
# Sync individual credential files (remap /root/.hermes to detected home)
|
|
|
|
|
for mount_entry in get_credential_file_mounts():
|
|
|
|
|
remote_path = mount_entry["container_path"].replace("/root/.hermes", container_base, 1)
|
|
|
|
|
parent_dir = str(Path(remote_path).parent)
|
|
|
|
|
mkdir_cmd = self._build_ssh_command()
|
|
|
|
|
mkdir_cmd.append(f"mkdir -p {parent_dir}")
|
|
|
|
|
subprocess.run(mkdir_cmd, capture_output=True, text=True, timeout=10)
|
|
|
|
|
cmd = rsync_base + [mount_entry["host_path"], f"{dest_prefix}:{remote_path}"]
|
|
|
|
|
result = subprocess.run(cmd, capture_output=True, text=True, timeout=30)
|
|
|
|
|
if result.returncode == 0:
|
|
|
|
|
logger.info("SSH: synced credential %s -> %s", mount_entry["host_path"], remote_path)
|
|
|
|
|
else:
|
|
|
|
|
logger.debug("SSH: rsync credential failed: %s", result.stderr.strip())
|
|
|
|
|
|
2026-04-02 13:45:53 +05:30
|
|
|
# Sync skills directory (remap to detected home)
|
|
|
|
|
skills_mount = get_skills_directory_mount(container_base=container_base)
|
|
|
|
|
if skills_mount:
|
feat: mount skills directory into all remote backends with live sync (#3890)
Skills with scripts/, templates/, and references/ subdirectories need
those files available inside sandboxed execution environments. Previously
the skills directory was missing entirely from remote backends.
Live sync — files stay current as credentials refresh and skills update:
- Docker/Singularity: bind mounts are inherently live (host changes
visible immediately)
- Modal: _sync_files() runs before each command with mtime+size caching,
pushing only changed credential and skill files (~13μs no-op overhead)
- SSH: rsync --safe-links before each command (naturally incremental)
- Daytona: _upload_if_changed() with mtime+size caching before each command
Security — symlink filtering:
- Docker/Singularity: sanitized temp copy when symlinks detected
- Modal/Daytona: iter_skills_files() skips symlinks
- SSH: rsync --safe-links skips symlinks pointing outside source tree
- Temp dir cleanup via atexit + reuse across calls
Non-root user support:
- SSH: detects remote home via echo $HOME, syncs to $HOME/.hermes/
- Daytona: detects sandbox home before sync, uploads to $HOME/.hermes/
- Docker/Modal/Singularity: run as root, /root/.hermes/ is correct
Also:
- credential_files.py: fix name/path key fallback in required_credential_files
- Singularity, SSH, Daytona: gained credential file support
- 14 tests covering symlink filtering, name/path fallback, iter_skills_files
2026-03-30 02:45:41 -07:00
|
|
|
remote_path = skills_mount["container_path"]
|
|
|
|
|
mkdir_cmd = self._build_ssh_command()
|
|
|
|
|
mkdir_cmd.append(f"mkdir -p {remote_path}")
|
|
|
|
|
subprocess.run(mkdir_cmd, capture_output=True, text=True, timeout=10)
|
|
|
|
|
cmd = rsync_base + [
|
|
|
|
|
skills_mount["host_path"].rstrip("/") + "/",
|
|
|
|
|
f"{dest_prefix}:{remote_path}/",
|
|
|
|
|
]
|
|
|
|
|
result = subprocess.run(cmd, capture_output=True, text=True, timeout=60)
|
|
|
|
|
if result.returncode == 0:
|
|
|
|
|
logger.info("SSH: synced skills dir %s -> %s", skills_mount["host_path"], remote_path)
|
|
|
|
|
else:
|
|
|
|
|
logger.debug("SSH: rsync skills dir failed: %s", result.stderr.strip())
|
|
|
|
|
except Exception as e:
|
|
|
|
|
logger.debug("SSH: could not sync skills/credentials: %s", e)
|
|
|
|
|
|
2026-04-02 13:45:53 +05:30
|
|
|
# ------------------------------------------------------------------
|
|
|
|
|
# Unified execution hooks
|
|
|
|
|
# ------------------------------------------------------------------
|
2026-03-15 02:33:04 +05:30
|
|
|
|
2026-04-02 13:45:53 +05:30
|
|
|
def _before_execute(self):
|
|
|
|
|
"""Incremental sync before each command so mid-session credential
|
|
|
|
|
refreshes and skill updates are picked up."""
|
|
|
|
|
self._sync_skills_and_credentials()
|
2026-03-12 23:38:11 +05:30
|
|
|
|
2026-04-02 13:45:53 +05:30
|
|
|
def _run_bash(self, cmd_string, *, stdin_data=None):
|
2026-03-12 23:38:11 +05:30
|
|
|
cmd = self._build_ssh_command()
|
2026-04-02 13:45:53 +05:30
|
|
|
cmd.extend(["bash", "-c", shlex.quote(cmd_string)])
|
|
|
|
|
proc = subprocess.Popen(
|
2026-03-12 23:38:11 +05:30
|
|
|
cmd,
|
2026-04-02 13:45:53 +05:30
|
|
|
stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
|
|
|
|
stdin=subprocess.PIPE if stdin_data is not None else subprocess.DEVNULL,
|
2026-03-12 23:38:11 +05:30
|
|
|
text=True,
|
|
|
|
|
)
|
2026-04-02 13:45:53 +05:30
|
|
|
if stdin_data:
|
2026-03-13 16:54:11 +05:30
|
|
|
try:
|
2026-04-02 13:45:53 +05:30
|
|
|
proc.stdin.write(stdin_data)
|
|
|
|
|
proc.stdin.close()
|
|
|
|
|
except (BrokenPipeError, OSError):
|
|
|
|
|
pass
|
|
|
|
|
return proc
|
2026-03-13 16:54:11 +05:30
|
|
|
|
2026-04-02 13:45:53 +05:30
|
|
|
def _run_bash_login(self, cmd_string):
|
|
|
|
|
cmd = self._build_ssh_command()
|
|
|
|
|
cmd.extend(["bash", "-l", "-c", shlex.quote(cmd_string)])
|
|
|
|
|
return subprocess.Popen(
|
|
|
|
|
cmd, stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
|
|
|
|
|
stdin=subprocess.DEVNULL, text=True,
|
2026-03-12 23:38:11 +05:30
|
|
|
)
|
2026-04-02 13:45:53 +05:30
|
|
|
|
|
|
|
|
def _read_file_in_env(self, path: str) -> str:
|
|
|
|
|
"""SSH override: use subprocess.run for single-shot cat, suppress stderr.
|
|
|
|
|
|
|
|
|
|
SSH connection warnings (post-quantum, etc.) must not pollute
|
|
|
|
|
the cwdfile read — use separate stderr to discard them.
|
|
|
|
|
"""
|
2026-03-12 23:38:11 +05:30
|
|
|
cmd = self._build_ssh_command()
|
2026-04-02 13:45:53 +05:30
|
|
|
cmd.append(f"cat {shlex.quote(path)} 2>/dev/null")
|
2026-03-12 23:38:11 +05:30
|
|
|
try:
|
|
|
|
|
result = subprocess.run(
|
2026-03-13 16:54:11 +05:30
|
|
|
cmd, capture_output=True, text=True, timeout=10,
|
2026-03-12 23:38:11 +05:30
|
|
|
)
|
2026-04-02 13:45:53 +05:30
|
|
|
return result.stdout
|
2026-03-12 23:38:11 +05:30
|
|
|
except (subprocess.TimeoutExpired, OSError):
|
2026-04-02 13:45:53 +05:30
|
|
|
return ""
|
2026-03-12 23:38:11 +05:30
|
|
|
|
2026-04-02 13:45:53 +05:30
|
|
|
    def cleanup(self):
        """Tear down the session: remote temp files, base cleanup, ControlMaster.

        Order matters: remote files must be removed while the ControlMaster
        socket is still alive, and the master is only asked to exit after
        the base class has finished its own cleanup.
        """
        # Clean up remote snapshot and cwdfile before closing ControlMaster
        # (_snapshot_path/_cwdfile_path presumably set by BaseEnvironment's
        # session init — confirm against tools/environments/base.py).
        if self._snapshot_path or self._cwdfile_path:
            paths = " ".join(p for p in (self._snapshot_path, self._cwdfile_path) if p)
            try:
                cmd = self._build_ssh_command()
                cmd.append(f"rm -f {paths}")
                subprocess.run(cmd, capture_output=True, timeout=5)
            except Exception:
                # Best-effort: remote may already be unreachable.
                pass
        super().cleanup()
        if self.control_socket.exists():
            try:
                # `ssh -O exit` asks the ControlMaster to shut down; only the
                # ControlPath is needed, so no port/key options here.
                cmd = ["ssh", "-o", f"ControlPath={self.control_socket}",
                       "-O", "exit", f"{self.user}@{self.host}"]
                subprocess.run(cmd, capture_output=True, timeout=5)
            except (OSError, subprocess.SubprocessError):
                pass
            try:
                # Remove the socket file even if the exit request failed.
                self.control_socket.unlink()
            except OSError:
                pass
|