Compare commits

...

12 Commits

Author SHA1 Message Date
teknium1
621fd80b1e fix(cli): accept session ID prefixes for session actions
Resolve session IDs by exact match or unique prefix for sessions delete/export/rename so IDs copied from the sessions list work even when the table view truncates them. Add SessionDB prefix-resolution coverage and a CLI regression test for deleting by listed prefix.
2026-03-15 04:01:56 -07:00
Teknium
2b8fd9a8e3 Merge pull request #1422 from NousResearch/fix/1409-photo-burst-interrupts
fix(gateway): prevent Telegram photo burst interrupts
2026-03-15 03:51:51 -07:00
teknium1
fef710aca8 test(gateway): cover photo burst interrupt regressions
Add regression coverage for non-album Telegram photo burst batching, photo follow-ups that should queue without interrupting active runs, and the gateway priority-interrupt path for photo events.
2026-03-15 03:50:45 -07:00
CoinDegen
4ae1334287 fix(gateway): prevent telegram photo burst interrupts 2026-03-15 03:49:01 -07:00
Teknium
db3e3aa6c5 Merge pull request #1421 from NousResearch/fix/1247-preserve-mcp-toolsets
fix(tools): preserve MCP toolsets when saving platform tool config
2026-03-15 03:33:31 -07:00
ygd58
633488e0c0 fix(tools): preserve MCP toolsets when saving platform tool config
_save_platform_tools() overwrote the entire platform_toolsets list with
only the toolsets known to CONFIGURABLE_TOOLSETS. This silently dropped
any MCP server toolsets that users had added manually to config.yaml.

Fix: collect any existing toolset keys that are not in CONFIGURABLE_TOOLSETS
and append them back after the wizard's selections are written. This ensures
MCP toolsets survive a hermes tools save.

Fixes #1247
2026-03-15 03:28:20 -07:00
Teknium
0de200cf4d Merge pull request #1419 from NousResearch/fix/1264-env-secret-blocklist
fix(security): block gateway and tool env vars in subprocesses
2026-03-15 03:22:58 -07:00
Teknium
f6fdb18fe6 Merge pull request #1417 from NousResearch/fix/1056-dm-session-isolation
fix(gateway): isolate DM sessions by chat_id
2026-03-15 03:22:39 -07:00
teknium1
b177b4abad fix(security): block gateway and tool env vars in subprocesses
Extend subprocess env sanitization beyond provider credentials by blocking Hermes-managed tool, messaging, and related gateway runtime vars. Reuse a shared sanitizer in LocalEnvironment and ProcessRegistry so background and PTY processes honor the same blocklist and _HERMES_FORCE_ escape hatch. Add regression coverage for local env execution and process_registry spawning.
2026-03-15 02:51:04 -07:00
teknium1
232ba441d7 test: cover DM session key isolation
Update interrupt-key expectations for namespaced DM session keys and add a regression test that different DM chat IDs produce distinct gateway sessions.
2026-03-15 02:38:48 -07:00
heyyyimmax
34e120bcbb fix(gateway): enforce chat_id isolation for all DM sessions 2026-03-15 02:37:53 -07:00
Teknium
779f8df6a6 Merge pull request #1408 from NousResearch/hermes/hermes-daa73839
fix: make Claude image handling work end-to-end
2026-03-14 23:45:03 -07:00
19 changed files with 751 additions and 65 deletions

View File

@@ -288,6 +288,7 @@ class MessageEvent:
message_id: Optional[str] = None
# Media attachments
# media_urls: local file paths (for vision tool access)
media_urls: List[str] = field(default_factory=list)
media_types: List[str] = field(default_factory=list)
@@ -751,7 +752,25 @@ class BasePlatformAdapter(ABC):
# Check if there's already an active handler for this session
if session_key in self._active_sessions:
# Store this as a pending message - it will interrupt the running agent
# Special case: photo bursts/albums frequently arrive as multiple near-
# simultaneous messages. Queue them without interrupting the active run,
# then process them immediately after the current task finishes.
if event.message_type == MessageType.PHOTO:
print(f"[{self.name}] 🖼️ Queuing photo follow-up for session {session_key} without interrupt")
existing = self._pending_messages.get(session_key)
if existing and existing.message_type == MessageType.PHOTO:
existing.media_urls.extend(event.media_urls)
existing.media_types.extend(event.media_types)
if event.text:
if not existing.text:
existing.text = event.text
elif event.text not in existing.text:
existing.text = f"{existing.text}\n\n{event.text}".strip()
else:
self._pending_messages[session_key] = event
return # Don't interrupt now - will run after current task completes
# Default behavior for non-photo follow-ups: interrupt the running agent
print(f"[{self.name}] ⚡ New message while session {session_key} is active - triggering interrupt")
self._pending_messages[session_key] = event
# Signal the interrupt (the processing task checks this)

View File

@@ -111,6 +111,11 @@ class TelegramAdapter(BasePlatformAdapter):
super().__init__(config, Platform.TELEGRAM)
self._app: Optional[Application] = None
self._bot: Optional[Bot] = None
# Buffer rapid/album photo updates so Telegram image bursts are handled
# as a single MessageEvent instead of self-interrupting multiple turns.
self._media_batch_delay_seconds = float(os.getenv("HERMES_TELEGRAM_MEDIA_BATCH_DELAY_SECONDS", "0.8"))
self._pending_photo_batches: Dict[str, MessageEvent] = {}
self._pending_photo_batch_tasks: Dict[str, asyncio.Task] = {}
self._media_group_events: Dict[str, MessageEvent] = {}
self._media_group_tasks: Dict[str, asyncio.Task] = {}
self._token_lock_identity: Optional[str] = None
@@ -289,13 +294,19 @@ class TelegramAdapter(BasePlatformAdapter):
release_scoped_lock("telegram-bot-token", self._token_lock_identity)
except Exception as e:
logger.warning("[%s] Error releasing Telegram token lock: %s", self.name, e, exc_info=True)
for task in self._pending_photo_batch_tasks.values():
if task and not task.done():
task.cancel()
self._pending_photo_batch_tasks.clear()
self._pending_photo_batches.clear()
self._mark_disconnected()
self._app = None
self._bot = None
self._token_lock_identity = None
logger.info("[%s] Disconnected from Telegram", self.name)
async def send(
self,
chat_id: str,
@@ -807,6 +818,49 @@ class TelegramAdapter(BasePlatformAdapter):
event.text = "\n".join(parts)
await self.handle_message(event)
def _photo_batch_key(self, event: MessageEvent, msg: Message) -> str:
    """Compute the key under which a Telegram photo/album update is batched."""
    from gateway.session import build_session_key

    base = build_session_key(event.source)
    album_id = getattr(msg, "media_group_id", None)
    # Albums batch per media_group_id; loose photos share one burst bucket.
    return f"{base}:album:{album_id}" if album_id else f"{base}:photo-burst"
async def _flush_photo_batch(self, batch_key: str) -> None:
    """Deliver a buffered photo burst/album as one combined MessageEvent."""
    me = asyncio.current_task()
    try:
        # Let the burst accumulate for the configured delay before flushing.
        await asyncio.sleep(self._media_batch_delay_seconds)
        pending = self._pending_photo_batches.pop(batch_key, None)
        if not pending:
            return
        logger.info("[Telegram] Flushing photo batch %s with %d image(s)", batch_key, len(pending.media_urls))
        await self.handle_message(pending)
    finally:
        # Deregister only our own task entry; a newer scheduled flush for the
        # same key must not be dropped by a cancelled/older one.
        if self._pending_photo_batch_tasks.get(batch_key) is me:
            self._pending_photo_batch_tasks.pop(batch_key, None)
def _enqueue_photo_event(self, batch_key: str, event: MessageEvent) -> None:
    """Fold a photo event into its pending batch and (re)schedule the flush."""
    pending = self._pending_photo_batches.get(batch_key)
    if pending is None:
        # First photo of this burst/album: the event becomes the batch.
        self._pending_photo_batches[batch_key] = event
    else:
        # Later photos: append media and merge any novel caption text.
        pending.media_urls.extend(event.media_urls)
        pending.media_types.extend(event.media_types)
        if event.text:
            if not pending.text:
                pending.text = event.text
            elif event.text not in pending.text:
                pending.text = f"{pending.text}\n\n{event.text}".strip()
    # Restart the flush timer so the window extends while photos keep arriving.
    stale = self._pending_photo_batch_tasks.get(batch_key)
    if stale and not stale.done():
        stale.cancel()
    self._pending_photo_batch_tasks[batch_key] = asyncio.create_task(self._flush_photo_batch(batch_key))
async def _handle_media_message(self, update: Update, context: ContextTypes.DEFAULT_TYPE) -> None:
"""Handle incoming media messages, downloading images to local cache."""
if not update.message:
@@ -858,14 +912,22 @@ class TelegramAdapter(BasePlatformAdapter):
if file_obj.file_path.lower().endswith(candidate):
ext = candidate
break
# Save to cache and populate media_urls with the local path
# Save to local cache (for vision tool access)
cached_path = cache_image_from_bytes(bytes(image_bytes), ext=ext)
event.media_urls = [cached_path]
event.media_types = [f"image/{ext.lstrip('.')}"]
event.media_types = [f"image/{ext.lstrip('.')}" ]
logger.info("[Telegram] Cached user photo at %s", cached_path)
media_group_id = getattr(msg, "media_group_id", None)
if media_group_id:
await self._queue_media_group_event(str(media_group_id), event)
else:
batch_key = self._photo_batch_key(event, msg)
self._enqueue_photo_event(batch_key, event)
return
except Exception as e:
logger.warning("[Telegram] Failed to cache photo: %s", e, exc_info=True)
# Download voice/audio messages to cache for STT transcription
if msg.voice:
try:

View File

@@ -1095,11 +1095,36 @@ class GatewayRunner:
)
return None
# PRIORITY: If an agent is already running for this session, interrupt it
# immediately. This is before command parsing to minimize latency -- the
# user's "stop" message reaches the agent as fast as possible.
# PRIORITY handling when an agent is already running for this session.
# Default behavior is to interrupt immediately so user text/stop messages
# are handled with minimal latency.
#
# Special case: Telegram/photo bursts often arrive as multiple near-
# simultaneous updates. Do NOT interrupt for photo-only follow-ups here;
# let the adapter-level batching/queueing logic absorb them.
_quick_key = build_session_key(source)
if _quick_key in self._running_agents:
if event.message_type == MessageType.PHOTO:
logger.debug("PRIORITY photo follow-up for session %s — queueing without interrupt", _quick_key[:20])
adapter = self.adapters.get(source.platform)
if adapter:
# Reuse adapter queue semantics so photo bursts merge cleanly.
if _quick_key in adapter._pending_messages:
existing = adapter._pending_messages[_quick_key]
if getattr(existing, "message_type", None) == MessageType.PHOTO:
existing.media_urls.extend(event.media_urls)
existing.media_types.extend(event.media_types)
if event.text:
if not existing.text:
existing.text = event.text
elif event.text not in existing.text:
existing.text = f"{existing.text}\n\n{event.text}".strip()
else:
adapter._pending_messages[_quick_key] = event
else:
adapter._pending_messages[_quick_key] = event
return None
running_agent = self._running_agents[_quick_key]
logger.debug("PRIORITY interrupt for session %s", _quick_key[:20])
running_agent.interrupt(event.text)
@@ -3490,9 +3515,13 @@ class GatewayRunner:
1. Immediately understand what the user sent (no extra tool call).
2. Re-examine the image with vision_analyze if it needs more detail.
Athabasca persistence should happen through Athabasca's own POST
/api/uploads flow, using the returned asset.publicUrl rather than local
cache paths.
Args:
user_text: The user's original caption / message text.
image_paths: List of local file paths to cached images.
user_text: The user's original caption / message text.
image_paths: List of local file paths to cached images.
Returns:
The enriched message string with vision descriptions prepended.
@@ -3517,10 +3546,16 @@ class GatewayRunner:
result = _json.loads(result_json)
if result.get("success"):
description = result.get("analysis", "")
athabasca_note = (
"\n[If this image needs to persist in Athabasca state, upload the cached file "
"through Athabasca POST /api/uploads and use the returned asset.publicUrl. "
"Do not store the local cache path as the canonical imageUrl.]"
)
enriched_parts.append(
f"[The user sent an image~ Here's what I can see:\n{description}]\n"
f"[If you need a closer look, use vision_analyze with "
f"image_url: {path} ~]"
f"{athabasca_note}"
)
else:
enriched_parts.append(

View File

def build_session_key(source: "SessionSource") -> str:
    """Build the canonical session key for a message source.

    This is the single source of truth for session key construction.

    DM rules:
    - DMs include chat_id when present, so each private conversation is isolated.
    - thread_id further differentiates threaded DMs within the same DM chat.
    - Without chat_id, thread_id is used as a best-effort fallback.
    - Without thread_id or chat_id, DMs share a single session.

    Group/channel rules:
    - chat_id identifies the parent group/channel.
    - thread_id differentiates threads within that parent chat.
    - Without identifiers, messages fall back to one session per platform/chat_type.
    """
    platform = source.platform.value
    if source.chat_type == "dm":
        if source.chat_id:
            if source.thread_id:
                return f"agent:main:{platform}:dm:{source.chat_id}:{source.thread_id}"
            return f"agent:main:{platform}:dm:{source.chat_id}"
        if source.thread_id:
            # Best-effort fallback when the platform supplies no chat_id.
            return f"agent:main:{platform}:dm:{source.thread_id}"
        return f"agent:main:{platform}:dm"
    if source.chat_id:
        if source.thread_id:
            return f"agent:main:{platform}:{source.chat_type}:{source.chat_id}:{source.thread_id}"
        return f"agent:main:{platform}:{source.chat_type}:{source.chat_id}"
    if source.thread_id:
        return f"agent:main:{platform}:{source.chat_type}:{source.thread_id}"
    return f"agent:main:{platform}:{source.chat_type}"
class SessionStore:

View File

@@ -3103,7 +3103,11 @@ For more help on a command:
elif action == "export":
if args.session_id:
data = db.export_session(args.session_id)
resolved_session_id = db.resolve_session_id(args.session_id)
if not resolved_session_id:
print(f"Session '{args.session_id}' not found.")
return
data = db.export_session(resolved_session_id)
if not data:
print(f"Session '{args.session_id}' not found.")
return
@@ -3118,13 +3122,17 @@ For more help on a command:
print(f"Exported {len(sessions)} sessions to {args.output}")
elif action == "delete":
resolved_session_id = db.resolve_session_id(args.session_id)
if not resolved_session_id:
print(f"Session '{args.session_id}' not found.")
return
if not args.yes:
confirm = input(f"Delete session '{args.session_id}' and all its messages? [y/N] ")
confirm = input(f"Delete session '{resolved_session_id}' and all its messages? [y/N] ")
if confirm.lower() not in ("y", "yes"):
print("Cancelled.")
return
if db.delete_session(args.session_id):
print(f"Deleted session '{args.session_id}'.")
if db.delete_session(resolved_session_id):
print(f"Deleted session '{resolved_session_id}'.")
else:
print(f"Session '{args.session_id}' not found.")
@@ -3140,10 +3148,14 @@ For more help on a command:
print(f"Pruned {count} session(s).")
elif action == "rename":
resolved_session_id = db.resolve_session_id(args.session_id)
if not resolved_session_id:
print(f"Session '{args.session_id}' not found.")
return
title = " ".join(args.title)
try:
if db.set_session_title(args.session_id, title):
print(f"Session '{args.session_id}' renamed to: {title}")
if db.set_session_title(resolved_session_id, title):
print(f"Session '{resolved_session_id}' renamed to: {title}")
else:
print(f"Session '{args.session_id}' not found.")
except ValueError as e:

View File

@@ -354,9 +354,29 @@ def _get_platform_tools(config: dict, platform: str) -> Set[str]:
def _save_platform_tools(config: dict, platform: str, enabled_toolset_keys: Set[str]):
    """Save the selected toolset keys for a platform to config.

    Preserves any non-configurable toolset entries (like MCP server names)
    that were already in the config for this platform, so a wizard save does
    not silently drop manually added MCP toolsets (issue #1247).

    Args:
        config: Loaded config mapping; mutated in place and then persisted.
        platform: Platform key under ``platform_toolsets`` (e.g. "cli").
        enabled_toolset_keys: Toolset keys the user selected in the wizard.
    """
    config.setdefault("platform_toolsets", {})

    # Keys the wizard knows about; anything else already present in the
    # existing list (e.g. an MCP server name) must be carried over unchanged.
    configurable_keys = {ts_key for ts_key, _, _ in CONFIGURABLE_TOOLSETS}

    existing_toolsets = config.get("platform_toolsets", {}).get(platform, [])
    if not isinstance(existing_toolsets, list):
        # Defensive: a malformed (non-list) entry is treated as empty.
        existing_toolsets = []

    preserved_entries = {
        entry for entry in existing_toolsets
        if entry not in configurable_keys
    }

    # Merge preserved entries with the wizard's new selections and persist.
    config["platform_toolsets"][platform] = sorted(enabled_toolset_keys | preserved_entries)
    save_config(config)

View File

@@ -249,6 +249,32 @@ class SessionDB:
row = cursor.fetchone()
return dict(row) if row else None
def resolve_session_id(self, session_id_or_prefix: str) -> Optional[str]:
    """Resolve an exact or uniquely prefixed session ID to the full ID.

    Returns the exact ID when it exists. Otherwise treats the input as a
    prefix and returns the single matching session ID if the prefix is
    unambiguous. Returns None for no matches or ambiguous prefixes.
    """
    # An exact hit wins outright, even if it also prefixes other IDs.
    exact = self.get_session(session_id_or_prefix)
    if exact:
        return exact["id"]

    # Escape LIKE metacharacters so the user's text matches literally.
    # Backslash must be escaped first so it does not double-escape the rest.
    escaped = session_id_or_prefix
    for raw, quoted in (("\\", "\\\\"), ("%", "\\%"), ("_", "\\_")):
        escaped = escaped.replace(raw, quoted)

    # LIMIT 2 is enough to tell "unique" apart from "ambiguous".
    cursor = self._conn.execute(
        "SELECT id FROM sessions WHERE id LIKE ? ESCAPE '\\' ORDER BY started_at DESC LIMIT 2",
        (f"{escaped}%",),
    )
    rows = cursor.fetchall()
    return rows[0]["id"] if len(rows) == 1 else None
# Maximum length for session titles
MAX_TITLE_LENGTH = 100

View File

@@ -0,0 +1,25 @@
from unittest.mock import patch
import pytest
@pytest.mark.asyncio
async def test_image_enrichment_uses_athabasca_upload_guidance_without_stale_r2_warning():
    # Regression: enrichment text must point at Athabasca's POST /api/uploads
    # flow and must no longer carry the retired R2 / gateway-media-URL notes.
    from gateway.run import GatewayRunner
    # Bypass __init__: only _enrich_message_with_vision is exercised here.
    runner = object.__new__(GatewayRunner)
    with patch(
        "tools.vision_tools.vision_analyze_tool",
        return_value='{"success": true, "analysis": "A painted serpent warrior."}',
    ):
        enriched = await runner._enrich_message_with_vision(
            "caption",
            ["/tmp/test.jpg"],
        )
    assert "R2 not configured" not in enriched
    assert "Gateway media URL available for reference" not in enriched
    assert "POST /api/uploads" in enriched
    assert "Do not store the local cache path" in enriched
    assert "caption" in enriched

View File

@@ -11,7 +11,7 @@ import asyncio
import pytest
from gateway.config import Platform, PlatformConfig
from gateway.platforms.base import BasePlatformAdapter, MessageEvent, SendResult
from gateway.platforms.base import BasePlatformAdapter, MessageEvent, MessageType, SendResult
from gateway.session import SessionSource, build_session_key
@@ -50,11 +50,11 @@ class TestInterruptKeyConsistency:
"""Ensure adapter interrupt methods are queried with session_key, not chat_id."""
def test_session_key_differs_from_chat_id_for_dm(self):
    """Session key for a DM is namespaced and includes the DM chat_id."""
    source = _source("123456", "dm")
    session_key = build_session_key(source)
    # Raw chat_id is never used as a session key; the namespaced form is.
    assert session_key != source.chat_id
    assert session_key == "agent:main:telegram:dm:123456"
def test_session_key_differs_from_chat_id_for_group(self):
"""Session key for a group chat includes prefix, unlike raw chat_id."""
@@ -122,3 +122,29 @@ class TestInterruptKeyConsistency:
# Interrupt event was set
assert adapter._active_sessions[session_key].is_set()
@pytest.mark.asyncio
async def test_photo_followup_is_queued_without_interrupt(self):
    """Photo follow-ups should queue behind the active run instead of interrupting it."""
    adapter = StubAdapter()
    adapter.set_message_handler(lambda event: asyncio.sleep(0, result=None))
    source = _source("-1001234", "group")
    session_key = build_session_key(source)
    # Simulate an agent already running for this session.
    interrupt_event = asyncio.Event()
    adapter._active_sessions[session_key] = interrupt_event
    event = MessageEvent(
        text="caption",
        source=source,
        message_type=MessageType.PHOTO,
        message_id="2",
        media_urls=["/tmp/photo-a.jpg"],
        media_types=["image/jpeg"],
    )
    await adapter.handle_message(event)
    # The photo is queued verbatim and the running agent was NOT interrupted.
    queued = adapter._pending_messages[session_key]
    assert queued is event
    assert queued.media_urls == ["/tmp/photo-a.jpg"]
    assert interrupt_event.is_set() is False

View File

@@ -338,7 +338,7 @@ class TestSessionStoreRewriteTranscript:
class TestWhatsAppDMSessionKeyConsistency:
"""Regression: all session-key construction must go through build_session_key
so WhatsApp DMs include chat_id while other DMs do not."""
so DMs are isolated by chat_id across platforms."""
@pytest.fixture()
def store(self, tmp_path):
@@ -369,15 +369,24 @@ class TestWhatsAppDMSessionKeyConsistency:
)
assert store._generate_session_key(source) == build_session_key(source)
def test_telegram_dm_includes_chat_id(self):
    """Non-WhatsApp DMs should also include chat_id to separate users."""
    source = SessionSource(
        platform=Platform.TELEGRAM,
        chat_id="99",
        chat_type="dm",
    )
    key = build_session_key(source)
    assert key == "agent:main:telegram:dm:99"
def test_distinct_dm_chat_ids_get_distinct_session_keys(self):
    """Different DM chats must not collapse into one shared session."""
    first = SessionSource(platform=Platform.TELEGRAM, chat_id="99", chat_type="dm")
    second = SessionSource(platform=Platform.TELEGRAM, chat_id="100", chat_type="dm")
    # chat_id is embedded in the key, so each DM partner gets its own session.
    assert build_session_key(first) == "agent:main:telegram:dm:99"
    assert build_session_key(second) == "agent:main:telegram:dm:100"
    assert build_session_key(first) != build_session_key(second)
def test_discord_group_includes_chat_id(self):
"""Group/channel keys include chat_type and chat_id."""

View File

@@ -12,6 +12,7 @@ import asyncio
import importlib
import os
import sys
from types import SimpleNamespace
from unittest.mock import AsyncMock, MagicMock, patch
import pytest
@@ -351,6 +352,26 @@ class TestDocumentDownloadBlock:
# ---------------------------------------------------------------------------
class TestMediaGroups:
@pytest.mark.asyncio
async def test_non_album_photo_burst_is_buffered_and_combined(self, adapter):
    # Two separate (non-album) photo messages arriving back-to-back should be
    # buffered into ONE combined MessageEvent instead of two agent turns.
    first_photo = _make_photo(_make_file_obj(b"first"))
    second_photo = _make_photo(_make_file_obj(b"second"))
    msg1 = _make_message(caption="two images", photo=[first_photo])
    msg2 = _make_message(photo=[second_photo])
    with patch("gateway.platforms.telegram.cache_image_from_bytes", side_effect=["/tmp/burst-one.jpg", "/tmp/burst-two.jpg"]):
        await adapter._handle_media_message(_make_update(msg1), MagicMock())
        await adapter._handle_media_message(_make_update(msg2), MagicMock())
    # Nothing is dispatched until the batching window elapses.
    assert adapter.handle_message.await_count == 0
    await asyncio.sleep(adapter.MEDIA_GROUP_WAIT_SECONDS + 0.05)
    adapter.handle_message.assert_awaited_once()
    event = adapter.handle_message.await_args.args[0]
    # Caption and media from both updates are merged, in arrival order.
    assert event.text == "two images"
    assert event.media_urls == ["/tmp/burst-one.jpg", "/tmp/burst-two.jpg"]
    assert len(event.media_types) == 2
@pytest.mark.asyncio
async def test_photo_album_is_buffered_and_combined(self, adapter):
first_photo = _make_photo(_make_file_obj(b"first"))
@@ -537,6 +558,51 @@ class TestSendDocument:
assert call_kwargs["reply_to_message_id"] == 50
class TestTelegramPhotoBatching:
@pytest.mark.asyncio
async def test_flush_photo_batch_does_not_drop_newer_scheduled_task(self, adapter):
    # A stale flush (old_task) finishing must not deregister a newer task
    # that has since been scheduled under the same batch key.
    old_task = MagicMock()
    new_task = MagicMock()
    batch_key = "session:photo-burst"
    adapter._pending_photo_batch_tasks[batch_key] = new_task
    adapter._pending_photo_batches[batch_key] = MessageEvent(
        text="",
        message_type=MessageType.PHOTO,
        source=SimpleNamespace(channel_id="chat-1"),
        media_urls=["/tmp/a.jpg"],
        media_types=["image/jpeg"],
    )
    # Run the flush as if we were the OLD task; patch sleep so it is instant.
    with (
        patch("gateway.platforms.telegram.asyncio.current_task", return_value=old_task),
        patch("gateway.platforms.telegram.asyncio.sleep", new=AsyncMock()),
    ):
        await adapter._flush_photo_batch(batch_key)
    # The newer task's registration survived the old task's cleanup.
    assert adapter._pending_photo_batch_tasks[batch_key] is new_task
@pytest.mark.asyncio
async def test_disconnect_cancels_pending_photo_batch_tasks(self, adapter):
    # disconnect() must cancel in-flight photo-batch flush tasks and clear
    # both the task map and the pending-batch buffer.
    task = MagicMock()
    task.done.return_value = False
    adapter._pending_photo_batch_tasks["session:photo-burst"] = task
    adapter._pending_photo_batches["session:photo-burst"] = MessageEvent(
        text="",
        message_type=MessageType.PHOTO,
        source=SimpleNamespace(channel_id="chat-1"),
    )
    # Stub the telegram Application shutdown calls disconnect() awaits.
    adapter._app = MagicMock()
    adapter._app.updater.stop = AsyncMock()
    adapter._app.stop = AsyncMock()
    adapter._app.shutdown = AsyncMock()
    await adapter.disconnect()
    task.cancel.assert_called_once()
    assert adapter._pending_photo_batch_tasks == {}
    assert adapter._pending_photo_batches == {}
# ---------------------------------------------------------------------------
# TestSendVideo — outbound video delivery
# ---------------------------------------------------------------------------

View File

@@ -0,0 +1,49 @@
import asyncio
from unittest.mock import MagicMock
import pytest
from gateway.config import GatewayConfig, Platform, PlatformConfig
from gateway.platforms.base import MessageEvent, MessageType
from gateway.session import SessionSource, build_session_key
from gateway.run import GatewayRunner
class _PendingAdapter:
def __init__(self):
self._pending_messages = {}
def _make_runner():
    """Build a GatewayRunner with just enough state to call _handle_message."""
    # Bypass __init__ to avoid full gateway wiring (adapters, DB, etc.).
    runner = object.__new__(GatewayRunner)
    runner.config = GatewayConfig(platforms={Platform.TELEGRAM: PlatformConfig(enabled=True, token="***")})
    runner.adapters = {Platform.TELEGRAM: _PendingAdapter()}
    runner._running_agents = {}
    runner._pending_messages = {}
    runner._pending_approvals = {}
    runner._voice_mode = {}
    # Authorize every source so the test reaches the priority-interrupt path.
    runner._is_user_authorized = lambda _source: True
    return runner
@pytest.mark.asyncio
async def test_handle_message_does_not_priority_interrupt_photo_followup():
    # Photo follow-ups for a session with a running agent must be queued on
    # the adapter, not routed through the priority-interrupt path.
    runner = _make_runner()
    source = SessionSource(platform=Platform.TELEGRAM, chat_id="12345", chat_type="dm")
    session_key = build_session_key(source)
    running_agent = MagicMock()
    runner._running_agents[session_key] = running_agent
    event = MessageEvent(
        text="caption",
        message_type=MessageType.PHOTO,
        source=source,
        media_urls=["/tmp/photo-a.jpg"],
        media_types=["image/jpeg"],
    )
    result = await runner._handle_message(event)
    assert result is None
    running_agent.interrupt.assert_not_called()
    # The event landed verbatim in the adapter's pending queue.
    assert runner.adapters[Platform.TELEGRAM]._pending_messages[session_key] is event

View File

@@ -0,0 +1,64 @@
import sys
def test_sessions_delete_accepts_unique_id_prefix(monkeypatch, capsys):
    # End-to-end CLI regression: `hermes sessions delete <prefix> --yes` must
    # resolve the short prefix to the full session ID before deleting.
    import hermes_cli.main as main_mod
    import hermes_state
    captured = {}
    class FakeDB:
        def resolve_session_id(self, session_id):
            captured["resolved_from"] = session_id
            return "20260315_092437_c9a6ff"
        def delete_session(self, session_id):
            captured["deleted"] = session_id
            return True
        def close(self):
            captured["closed"] = True
    monkeypatch.setattr(hermes_state, "SessionDB", lambda: FakeDB())
    monkeypatch.setattr(
        sys,
        "argv",
        ["hermes", "sessions", "delete", "20260315_092437_c9a6", "--yes"],
    )
    main_mod.main()
    output = capsys.readouterr().out
    # The CLI resolved the prefix, deleted the FULL ID, and closed the DB.
    assert captured == {
        "resolved_from": "20260315_092437_c9a6",
        "deleted": "20260315_092437_c9a6ff",
        "closed": True,
    }
    assert "Deleted session '20260315_092437_c9a6ff'." in output
def test_sessions_delete_reports_not_found_when_prefix_is_unknown(monkeypatch, capsys):
    # When resolution fails, the CLI must report "not found" and must never
    # attempt the deletion.
    import hermes_cli.main as main_mod
    import hermes_state
    class FakeDB:
        def resolve_session_id(self, session_id):
            return None
        def delete_session(self, session_id):
            raise AssertionError("delete_session should not be called when resolution fails")
        def close(self):
            pass
    monkeypatch.setattr(hermes_state, "SessionDB", lambda: FakeDB())
    monkeypatch.setattr(
        sys,
        "argv",
        ["hermes", "sessions", "delete", "missing-prefix", "--yes"],
    )
    main_mod.main()
    output = capsys.readouterr().out
    assert "Session 'missing-prefix' not found." in output

View File

@@ -1,6 +1,13 @@
"""Tests for hermes_cli.tools_config platform tool persistence."""
from hermes_cli.tools_config import _get_platform_tools, _platform_toolset_summary, _toolset_has_keys
from unittest.mock import patch
from hermes_cli.tools_config import (
_get_platform_tools,
_platform_toolset_summary,
_save_platform_tools,
_toolset_has_keys,
)
def test_get_platform_tools_uses_default_when_platform_not_configured():
@@ -31,7 +38,7 @@ def test_platform_toolset_summary_uses_explicit_platform_list():
def test_toolset_has_keys_for_vision_accepts_codex_auth(tmp_path, monkeypatch):
monkeypatch.setenv("HERMES_HOME", str(tmp_path))
(tmp_path / "auth.json").write_text(
'{"active_provider":"openai-codex","providers":{"openai-codex":{"tokens":{"access_token":"codex-access-token","refresh_token":"codex-refresh-token"}}}}'
'{"active_provider":"openai-codex","providers":{"openai-codex":{"tokens":{"access_token": "codex-...oken","refresh_token": "codex-...oken"}}}}'
)
monkeypatch.delenv("OPENROUTER_API_KEY", raising=False)
monkeypatch.delenv("OPENAI_BASE_URL", raising=False)
@@ -40,3 +47,56 @@ def test_toolset_has_keys_for_vision_accepts_codex_auth(tmp_path, monkeypatch):
monkeypatch.delenv("CONTEXT_VISION_PROVIDER", raising=False)
assert _toolset_has_keys("vision") is True
def test_save_platform_tools_preserves_mcp_server_names():
    """Ensure MCP server names are preserved when saving platform tools.

    Regression test for https://github.com/NousResearch/hermes-agent/issues/1247
    """
    config = {
        "platform_toolsets": {
            "cli": ["web", "terminal", "time", "github", "custom-mcp-server"]
        }
    }
    new_selection = {"web", "browser"}
    # Patch out disk persistence; only the in-memory merge is under test.
    with patch("hermes_cli.tools_config.save_config"):
        _save_platform_tools(config, "cli", new_selection)
    saved_toolsets = config["platform_toolsets"]["cli"]
    # Entries outside the wizard's configurable set survive the save...
    assert "time" in saved_toolsets
    assert "github" in saved_toolsets
    assert "custom-mcp-server" in saved_toolsets
    # ...the new selection is written...
    assert "web" in saved_toolsets
    assert "browser" in saved_toolsets
    # ...and a deselected configurable toolset is dropped.
    assert "terminal" not in saved_toolsets
def test_save_platform_tools_handles_empty_existing_config():
    """Saving succeeds when the config has no platform_toolsets section yet."""
    config = {}
    with patch("hermes_cli.tools_config.save_config"):
        _save_platform_tools(config, "telegram", {"web", "terminal"})
    stored = config["platform_toolsets"]["telegram"]
    assert "web" in stored
    assert "terminal" in stored
def test_save_platform_tools_handles_invalid_existing_config():
    """A non-list value under platform_toolsets is tolerated, not crashed on."""
    config = {"platform_toolsets": {"cli": "invalid-string-value"}}
    with patch("hermes_cli.tools_config.save_config"):
        _save_platform_tools(config, "cli", {"web"})
    assert "web" in config["platform_toolsets"]["cli"]

View File

@@ -361,6 +361,24 @@ class TestDeleteAndExport:
def test_delete_nonexistent(self, db):
    """Deleting an unknown session ID reports failure rather than raising."""
    deleted = db.delete_session("nope")
    assert deleted is False
def test_resolve_session_id_exact(self, db):
    """An exact, full session ID resolves to itself."""
    full_id = "20260315_092437_c9a6ff"
    db.create_session(session_id=full_id, source="cli")
    assert db.resolve_session_id(full_id) == full_id
def test_resolve_session_id_unique_prefix(self, db):
    """A prefix matching exactly one session resolves to the full ID."""
    full_id = "20260315_092437_c9a6ff"
    db.create_session(session_id=full_id, source="cli")
    assert db.resolve_session_id("20260315_092437_c9a6") == full_id
def test_resolve_session_id_ambiguous_prefix_returns_none(self, db):
    """A prefix shared by more than one session cannot be resolved."""
    for session_id in ("20260315_092437_c9a6aa", "20260315_092437_c9a6bb"):
        db.create_session(session_id=session_id, source="cli")
    assert db.resolve_session_id("20260315_092437_c9a6") is None
def test_resolve_session_id_escapes_like_wildcards(self, db):
    """A literal underscore in the prefix must not behave as a LIKE wildcard."""
    db.create_session(session_id="20260315_092437_c9a6ff", source="cli")
    # Would also match "20260315_092437" if "_" were treated as a
    # single-character wildcard, making the prefix ambiguous.
    db.create_session(session_id="20260315X092437_c9a6ff", source="cli")
    assert db.resolve_session_id("20260315_092437") == "20260315_092437_c9a6ff"
def test_export_session(self, db):
db.create_session(session_id="s1", source="cli", model="test")
db.append_message("s1", role="user", content="Hello")

View File

@@ -1,10 +1,11 @@
"""Tests for provider env var blocklist in LocalEnvironment.
"""Tests for subprocess env sanitization in LocalEnvironment.
Verifies that Hermes-internal provider env vars (OPENAI_BASE_URL, etc.)
are stripped from subprocess environments so external CLIs are not
silently misrouted.
Verifies that Hermes-managed provider, tool, and gateway env vars are
stripped from subprocess environments so external CLIs are not silently
misrouted or handed Hermes secrets.
See: https://github.com/NousResearch/hermes-agent/issues/1002
See: https://github.com/NousResearch/hermes-agent/issues/1264
"""
import os
@@ -110,6 +111,30 @@ class TestProviderEnvBlocklist:
for var in extra_provider_vars:
assert var not in result_env, f"{var} leaked into subprocess env"
def test_tool_and_gateway_vars_are_stripped(self):
    """Tool and gateway secrets/config must not leak into subprocess env."""
    # Representative sample of Hermes-managed tool/messaging/gateway vars.
    hermes_managed = {
        "TELEGRAM_BOT_TOKEN": "bot-token",
        "TELEGRAM_HOME_CHANNEL": "12345",
        "DISCORD_HOME_CHANNEL": "67890",
        "SLACK_APP_TOKEN": "xapp-secret",
        "WHATSAPP_ALLOWED_USERS": "+15555550123",
        "SIGNAL_ACCOUNT": "+15555550124",
        "HASS_TOKEN": "ha-secret",
        "EMAIL_PASSWORD": "email-secret",
        "FIRECRAWL_API_KEY": "fc-secret",
        "BROWSERBASE_PROJECT_ID": "bb-project",
        "ELEVENLABS_API_KEY": "el-secret",
        "GITHUB_TOKEN": "ghp_secret",
        "GH_TOKEN": "gh_alias_secret",
        "GATEWAY_ALLOW_ALL_USERS": "true",
        "GATEWAY_ALLOWED_USERS": "alice,bob",
    }
    env_after = _run_with_env(extra_os_env=hermes_managed)
    for var in hermes_managed:
        assert var not in env_after, f"{var} leaked into subprocess env"
def test_safe_vars_are_preserved(self):
"""Standard env vars (PATH, HOME, USER) must still be passed through."""
result_env = _run_with_env()
@@ -205,3 +230,56 @@ class TestBlocklistCoverage:
"HELICONE_API_KEY",
}
assert extras.issubset(_HERMES_PROVIDER_ENV_BLOCKLIST)
def test_optional_tool_and_messaging_vars_are_in_blocklist(self):
    """Tool/messaging vars from OPTIONAL_ENV_VARS should stay covered."""
    from hermes_cli.config import OPTIONAL_ENV_VARS

    for name, metadata in OPTIONAL_ENV_VARS.items():
        category = metadata.get("category")
        covers_tool_or_messaging = category in {"tool", "messaging"}
        if covers_tool_or_messaging:
            assert name in _HERMES_PROVIDER_ENV_BLOCKLIST, (
                f"Optional env var {name} (category={category}) missing from blocklist"
            )
        elif category == "setting" and metadata.get("password"):
            # Password-flagged settings are secrets too and must be blocked.
            assert name in _HERMES_PROVIDER_ENV_BLOCKLIST, (
                f"Secret setting env var {name} missing from blocklist"
            )
def test_gateway_runtime_vars_are_in_blocklist(self):
    """Gateway/runtime vars hard-coded in the blocklist must stay present."""
    expected_blocked = {
        "TELEGRAM_HOME_CHANNEL",
        "TELEGRAM_HOME_CHANNEL_NAME",
        "DISCORD_HOME_CHANNEL",
        "DISCORD_HOME_CHANNEL_NAME",
        "DISCORD_REQUIRE_MENTION",
        "DISCORD_FREE_RESPONSE_CHANNELS",
        "DISCORD_AUTO_THREAD",
        "SLACK_HOME_CHANNEL",
        "SLACK_HOME_CHANNEL_NAME",
        "SLACK_ALLOWED_USERS",
        "WHATSAPP_ENABLED",
        "WHATSAPP_MODE",
        "WHATSAPP_ALLOWED_USERS",
        "SIGNAL_HTTP_URL",
        "SIGNAL_ACCOUNT",
        "SIGNAL_ALLOWED_USERS",
        "SIGNAL_GROUP_ALLOWED_USERS",
        "SIGNAL_HOME_CHANNEL",
        "SIGNAL_HOME_CHANNEL_NAME",
        "SIGNAL_IGNORE_STORIES",
        "HASS_TOKEN",
        "HASS_URL",
        "EMAIL_ADDRESS",
        "EMAIL_PASSWORD",
        "EMAIL_IMAP_HOST",
        "EMAIL_SMTP_HOST",
        "EMAIL_HOME_ADDRESS",
        "EMAIL_HOME_ADDRESS_NAME",
        "GATEWAY_ALLOWED_USERS",
        "GH_TOKEN",
        "GITHUB_APP_ID",
        "GITHUB_APP_PRIVATE_KEY_PATH",
        "GITHUB_APP_INSTALLATION_ID",
    }
    # Subset check: every expected name must appear in the derived blocklist.
    assert expected_blocked <= _HERMES_PROVIDER_ENV_BLOCKLIST

View File

@@ -1,11 +1,13 @@
"""Tests for tools/process_registry.py — ProcessRegistry query methods, pruning, checkpoint."""
import json
import os
import time
import pytest
from pathlib import Path
from unittest.mock import MagicMock, patch
from tools.environments.local import _HERMES_PROVIDER_ENV_FORCE_PREFIX
from tools.process_registry import (
ProcessRegistry,
ProcessSession,
@@ -213,6 +215,54 @@ class TestPruning:
assert total <= MAX_PROCESSES
# =========================================================================
# Spawn env sanitization
# =========================================================================
class TestSpawnEnvSanitization:
    """spawn_local must hand background processes a sanitized environment."""

    def test_spawn_local_strips_blocked_vars_from_background_env(self, registry):
        # Captures the env= kwarg that spawn_local passes to subprocess.Popen.
        captured = {}

        def fake_popen(cmd, **kwargs):
            # Record the environment and return a minimal fake process
            # object with just the attributes spawn_local touches.
            captured["env"] = kwargs["env"]
            proc = MagicMock()
            proc.pid = 4321
            proc.stdout = iter([])
            proc.stdin = MagicMock()
            proc.poll.return_value = None
            return proc

        fake_thread = MagicMock()
        # clear=True gives a fully controlled os.environ: three safe vars
        # plus two Hermes-managed secrets that must be stripped.
        with patch.dict(os.environ, {
            "PATH": "/usr/bin:/bin",
            "HOME": "/home/user",
            "USER": "tester",
            "TELEGRAM_BOT_TOKEN": "bot-secret",
            "FIRECRAWL_API_KEY": "fc-secret",
        }, clear=True), \
             patch("tools.process_registry._find_shell", return_value="/bin/bash"), \
             patch("subprocess.Popen", side_effect=fake_popen), \
             patch("threading.Thread", return_value=fake_thread), \
             patch.object(registry, "_write_checkpoint"):
            registry.spawn_local(
                "echo hello",
                cwd="/tmp",
                env_vars={
                    "MY_CUSTOM_VAR": "keep-me",
                    # Blocked name supplied directly: should be dropped...
                    "TELEGRAM_BOT_TOKEN": "drop-me",
                    # ...while the _HERMES_FORCE_-prefixed form opts it back in.
                    f"{_HERMES_PROVIDER_ENV_FORCE_PREFIX}TELEGRAM_BOT_TOKEN": "forced-bot-token",
                },
            )
        env = captured["env"]
        # Caller-supplied custom var passes through untouched.
        assert env["MY_CUSTOM_VAR"] == "keep-me"
        # Forced value wins over both os.environ and the plain env_vars entry.
        assert env["TELEGRAM_BOT_TOKEN"] == "forced-bot-token"
        # Hermes secret from os.environ is stripped.
        assert "FIRECRAWL_API_KEY" not in env
        # The force-prefixed key itself must not leak into the child env.
        assert f"{_HERMES_PROVIDER_ENV_FORCE_PREFIX}TELEGRAM_BOT_TOKEN" not in env
        # spawn_local always forces unbuffered Python output for polling.
        assert env["PYTHONUNBUFFERED"] == "1"
# =========================================================================
# Checkpoint
# =========================================================================

View File

@@ -27,11 +27,12 @@ _HERMES_PROVIDER_ENV_FORCE_PREFIX = "_HERMES_FORCE_"
def _build_provider_env_blocklist() -> frozenset:
"""Derive the blocklist from the provider registry + known extras.
"""Derive the blocklist from provider, tool, and gateway config.
Automatically picks up api_key_env_vars and base_url_env_var from
every registered provider, so adding a new provider to auth.py is
enough — no manual list to keep in sync.
every registered provider, plus tool/messaging env vars from the
optional config registry, so new Hermes-managed secrets are blocked
in subprocesses without having to maintain multiple static lists.
"""
blocked: set[str] = set()
@@ -44,7 +45,18 @@ def _build_provider_env_blocklist() -> frozenset:
except ImportError:
pass
# Vars not in the registry but still Hermes-internal / conflict-prone
try:
from hermes_cli.config import OPTIONAL_ENV_VARS
for name, metadata in OPTIONAL_ENV_VARS.items():
category = metadata.get("category")
if category in {"tool", "messaging"}:
blocked.add(name)
elif category == "setting" and metadata.get("password"):
blocked.add(name)
except ImportError:
pass
# Vars not covered above but still Hermes-internal / conflict-prone.
blocked.update({
"OPENAI_BASE_URL",
"OPENAI_API_KEY",
@@ -67,6 +79,41 @@ def _build_provider_env_blocklist() -> frozenset:
"FIREWORKS_API_KEY", # Fireworks AI
"XAI_API_KEY", # xAI (Grok)
"HELICONE_API_KEY", # LLM Observability proxy
# Gateway/runtime config not represented in OPTIONAL_ENV_VARS.
"TELEGRAM_HOME_CHANNEL",
"TELEGRAM_HOME_CHANNEL_NAME",
"DISCORD_HOME_CHANNEL",
"DISCORD_HOME_CHANNEL_NAME",
"DISCORD_REQUIRE_MENTION",
"DISCORD_FREE_RESPONSE_CHANNELS",
"DISCORD_AUTO_THREAD",
"SLACK_HOME_CHANNEL",
"SLACK_HOME_CHANNEL_NAME",
"SLACK_ALLOWED_USERS",
"WHATSAPP_ENABLED",
"WHATSAPP_MODE",
"WHATSAPP_ALLOWED_USERS",
"SIGNAL_HTTP_URL",
"SIGNAL_ACCOUNT",
"SIGNAL_ALLOWED_USERS",
"SIGNAL_GROUP_ALLOWED_USERS",
"SIGNAL_HOME_CHANNEL",
"SIGNAL_HOME_CHANNEL_NAME",
"SIGNAL_IGNORE_STORIES",
"HASS_TOKEN",
"HASS_URL",
"EMAIL_ADDRESS",
"EMAIL_PASSWORD",
"EMAIL_IMAP_HOST",
"EMAIL_SMTP_HOST",
"EMAIL_HOME_ADDRESS",
"EMAIL_HOME_ADDRESS_NAME",
"GATEWAY_ALLOWED_USERS",
# Skills Hub / GitHub app auth paths and aliases.
"GH_TOKEN",
"GITHUB_APP_ID",
"GITHUB_APP_PRIVATE_KEY_PATH",
"GITHUB_APP_INSTALLATION_ID",
})
return frozenset(blocked)
@@ -74,6 +121,30 @@ def _build_provider_env_blocklist() -> frozenset:
_HERMES_PROVIDER_ENV_BLOCKLIST = _build_provider_env_blocklist()
def _sanitize_subprocess_env(base_env: dict | None, extra_env: dict | None = None) -> dict:
"""Filter Hermes-managed secrets from a subprocess environment.
`_HERMES_FORCE_<VAR>` entries in ``extra_env`` opt a blocked variable back in
intentionally for callers that truly need it.
"""
sanitized: dict[str, str] = {}
for key, value in (base_env or {}).items():
if key.startswith(_HERMES_PROVIDER_ENV_FORCE_PREFIX):
continue
if key not in _HERMES_PROVIDER_ENV_BLOCKLIST:
sanitized[key] = value
for key, value in (extra_env or {}).items():
if key.startswith(_HERMES_PROVIDER_ENV_FORCE_PREFIX):
real_key = key[len(_HERMES_PROVIDER_ENV_FORCE_PREFIX):]
sanitized[real_key] = value
elif key not in _HERMES_PROVIDER_ENV_BLOCKLIST:
sanitized[key] = value
return sanitized
def _find_bash() -> str:
"""Find bash for command execution.
@@ -249,18 +320,11 @@ class LocalEnvironment(BaseEnvironment):
# Ensure PATH always includes standard dirs — systemd services
# and some terminal multiplexers inherit a minimal PATH.
_SANE_PATH = "/usr/local/sbin:/usr/local/bin:/usr/sbin:/usr/bin:/sbin:/bin"
# Strip Hermes-internal provider vars so external CLIs
# (e.g. codex) are not silently misrouted. Callers that
# truly need a blocked var can opt in by prefixing the key
# with _HERMES_FORCE_ in self.env (e.g. _HERMES_FORCE_OPENAI_API_KEY).
merged = dict(os.environ | self.env)
run_env = {}
for k, v in merged.items():
if k.startswith(_HERMES_PROVIDER_ENV_FORCE_PREFIX):
real_key = k[len(_HERMES_PROVIDER_ENV_FORCE_PREFIX):]
run_env[real_key] = v
elif k not in _HERMES_PROVIDER_ENV_BLOCKLIST:
run_env[k] = v
# Strip Hermes-managed provider/tool/gateway vars so external CLIs
# are not silently misrouted or handed Hermes secrets. Callers that
# truly need a blocked var can opt in by prefixing the key with
# _HERMES_FORCE_ in self.env (e.g. _HERMES_FORCE_OPENAI_API_KEY).
run_env = _sanitize_subprocess_env(os.environ, self.env)
existing_path = run_env.get("PATH", "")
if "/usr/bin" not in existing_path.split(":"):
run_env["PATH"] = f"{existing_path}:{_SANE_PATH}" if existing_path else _SANE_PATH

View File

@@ -42,7 +42,7 @@ import time
import uuid
_IS_WINDOWS = platform.system() == "Windows"
from tools.environments.local import _find_shell, _HERMES_PROVIDER_ENV_BLOCKLIST
from tools.environments.local import _find_shell, _sanitize_subprocess_env
from dataclasses import dataclass, field
from pathlib import Path
from typing import Any, Dict, List, Optional
@@ -155,9 +155,7 @@ class ProcessRegistry:
else:
from ptyprocess import PtyProcess as _PtyProcessCls
user_shell = _find_shell()
pty_env = {k: v for k, v in os.environ.items()
if k not in _HERMES_PROVIDER_ENV_BLOCKLIST}
pty_env.update(env_vars or {})
pty_env = _sanitize_subprocess_env(os.environ, env_vars)
pty_env["PYTHONUNBUFFERED"] = "1"
pty_proc = _PtyProcessCls.spawn(
[user_shell, "-lic", command],
@@ -198,9 +196,7 @@ class ProcessRegistry:
# Force unbuffered output for Python scripts so progress is visible
# during background execution (libraries like tqdm/datasets buffer when
# stdout is a pipe, hiding output from process(action="poll")).
bg_env = {k: v for k, v in os.environ.items()
if k not in _HERMES_PROVIDER_ENV_BLOCKLIST}
bg_env.update(env_vars or {})
bg_env = _sanitize_subprocess_env(os.environ, env_vars)
bg_env["PYTHONUNBUFFERED"] = "1"
proc = subprocess.Popen(
[user_shell, "-lic", command],