mirror of https://github.com/NousResearch/hermes-agent.git
synced 2026-05-06 18:57:21 +08:00

Compare commits: v2026.4.3 ... feat/model (10 commits)

Commits:
  8320dfe900
  a3ff98b34c
  7def061fee
  de5aacddd2
  b1756084a3
  8a384628a5
  4979d77a4a
  a09fa690f0
  6d357bb185
  b3319b1252
@@ -113,6 +113,8 @@ DEFAULT_CONTEXT_LENGTHS = {
     "glm": 202752,
     # Kimi
     "kimi": 262144,
     # Arcee
     "trinity": 262144,
     # Hugging Face Inference Providers — model IDs use org/name format
+    "Qwen/Qwen3.5-397B-A17B": 131072,
+    "Qwen/Qwen3.5-35B-A3B": 131072,
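The table mixes bare lowercase family keys ("glm", "kimi", "trinity") with full org/name IDs. The resolver is not shown in this hunk, so the following lookup sketch is a hypothetical illustration of how such a mixed table could be consulted, not code from the repo:

```python
# Hypothetical resolver: try the exact model ID first, then fall back to
# any lowercase family key that appears in the ID.
DEFAULT_CONTEXT_LENGTHS = {
    "glm": 202752,
    "kimi": 262144,
    "trinity": 262144,
    "Qwen/Qwen3.5-397B-A17B": 131072,
    "Qwen/Qwen3.5-35B-A3B": 131072,
}

def context_length_for(model_id: str, default: int = 32768) -> int:
    if model_id in DEFAULT_CONTEXT_LENGTHS:          # exact org/name match
        return DEFAULT_CONTEXT_LENGTHS[model_id]
    lowered = model_id.lower()
    for key, length in DEFAULT_CONTEXT_LENGTHS.items():
        if key.islower() and key in lowered:         # family substring match
            return length
    return default

assert context_length_for("Qwen/Qwen3.5-35B-A3B") == 131072
assert context_length_for("moonshotai/kimi-k2") == 262144   # illustrative ID
```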
cli.py (12 changed lines)
@@ -3392,6 +3392,7 @@ class HermesCLI:
         from hermes_cli.models import (
             curated_models_for_provider, list_available_providers,
             normalize_provider, _PROVIDER_LABELS,
+            get_pricing_for_provider, format_model_pricing_table,
         )
         from hermes_cli.auth import resolve_provider as _resolve_provider

@@ -3425,7 +3426,13 @@
             marker = " ← active" if is_active else ""
             print(f" [{p['id']}]{marker}")
             curated = curated_models_for_provider(p["id"])
-            if curated:
+            # Fetch pricing for providers that support it (openrouter, nous)
+            pricing_map = get_pricing_for_provider(p["id"]) if p["id"] in ("openrouter", "nous") else {}
+            if curated and pricing_map:
+                cur_model = self.model if is_active else ""
+                for line in format_model_pricing_table(curated, pricing_map, current_model=cur_model):
+                    print(line)
+            elif curated:
                 for mid, desc in curated:
                     current_marker = " ← current" if (is_active and mid == self.model) else ""
                     print(f" {mid}{current_marker}")

@@ -7010,6 +7017,9 @@ class HermesCLI:
         buffer.
         """
         pasted_text = event.data or ""
+        # Normalise line endings — Windows \r\n and old Mac \r both become \n
+        # so the 5-line collapse threshold and display are consistent.
+        pasted_text = pasted_text.replace('\r\n', '\n').replace('\r', '\n')
         if self._try_attach_clipboard_image():
             event.app.invalidate()
         if pasted_text:
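The normalisation added at @@ -7010 relies only on standard str.replace semantics; a standalone check (ordering matters — \r\n must collapse before bare \r, otherwise "\r\n" would become "\n\n"):

```python
# Mixed Windows/old-Mac/Unix line endings collapse to \n in one pass.
pasted = "line1\r\nline2\rline3\n"
normalized = pasted.replace('\r\n', '\n').replace('\r', '\n')
assert normalized == "line1\nline2\nline3\n"
assert normalized.count('\n') == 3  # consistent count for the collapse threshold
```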
@@ -235,6 +235,7 @@ SUPPORTED_DOCUMENT_TYPES = {
     ".pdf": "application/pdf",
     ".md": "text/markdown",
     ".txt": "text/plain",
+    ".zip": "application/zip",
     ".docx": "application/vnd.openxmlformats-officedocument.wordprocessingml.document",
     ".xlsx": "application/vnd.openxmlformats-officedocument.spreadsheetml.sheet",
     ".pptx": "application/vnd.openxmlformats-officedocument.presentationml.presentation",
@@ -667,12 +667,13 @@ class GatewayRunner:
         # what's already saved and avoid overwriting newer entries.
         _current_memory = ""
         try:
-            from tools.memory_tool import MEMORY_DIR
+            from tools.memory_tool import get_memory_dir
+            _mem_dir = get_memory_dir()
             for fname, label in [
                 ("MEMORY.md", "MEMORY (your personal notes)"),
                 ("USER.md", "USER PROFILE (who the user is)"),
             ]:
-                fpath = MEMORY_DIR / fname
+                fpath = _mem_dir / fname
                 if fpath.exists():
                     content = fpath.read_text(encoding="utf-8").strip()
                     if content:
@@ -2143,8 +2143,18 @@ def _reset_config_provider() -> Path:
     return config_path


-def _prompt_model_selection(model_ids: List[str], current_model: str = "") -> Optional[str]:
-    """Interactive model selection. Puts current_model first with a marker. Returns chosen model ID or None."""
+def _prompt_model_selection(
+    model_ids: List[str],
+    current_model: str = "",
+    pricing: Optional[Dict[str, Dict[str, str]]] = None,
+) -> Optional[str]:
+    """Interactive model selection. Puts current_model first with a marker. Returns chosen model ID or None.
+
+    If *pricing* is provided (``{model_id: {prompt, completion}}``), a compact
+    price indicator is shown next to each model in aligned columns.
+    """
+    from hermes_cli.models import _format_price_per_mtok
+
     # Reorder: current model first, then the rest (deduplicated)
     ordered = []
     if current_model and current_model in model_ids:

@@ -2153,15 +2163,61 @@ def _prompt_model_selection(model_ids: List[str], current_model: str = "") -> Op
         if mid not in ordered:
             ordered.append(mid)

-    # Build display labels with marker on current
+    # Column-aligned labels when pricing is available
+    has_pricing = bool(pricing and any(pricing.get(m) for m in ordered))
+    name_col = max((len(m) for m in ordered), default=0) + 2 if has_pricing else 0
+
+    # Pre-compute formatted prices and dynamic column widths
+    _price_cache: dict[str, tuple[str, str, str]] = {}
+    price_col = 3  # minimum width
+    cache_col = 0  # only set if any model has cache pricing
+    has_cache = False
+    if has_pricing:
+        for mid in ordered:
+            p = pricing.get(mid)  # type: ignore[union-attr]
+            if p:
+                inp = _format_price_per_mtok(p.get("prompt", ""))
+                out = _format_price_per_mtok(p.get("completion", ""))
+                cache_read = p.get("input_cache_read", "")
+                cache = _format_price_per_mtok(cache_read) if cache_read else ""
+                if cache:
+                    has_cache = True
+            else:
+                inp, out, cache = "", "", ""
+            _price_cache[mid] = (inp, out, cache)
+            price_col = max(price_col, len(inp), len(out))
+            cache_col = max(cache_col, len(cache))
+    if has_cache:
+        cache_col = max(cache_col, 5)  # minimum: "Cache" header
+
     def _label(mid):
+        if has_pricing:
+            inp, out, cache = _price_cache.get(mid, ("", "", ""))
+            price_part = f" {inp:>{price_col}} {out:>{price_col}}"
+            if has_cache:
+                price_part += f" {cache:>{cache_col}}"
+            base = f"{mid:<{name_col}}{price_part}"
+        else:
+            base = mid
         if mid == current_model:
-            return f"{mid} ← currently in use"
-        return mid
+            base += " ← currently in use"
+        return base

     # Default cursor on the current model (index 0 if it was reordered to top)
     default_idx = 0

+    # Build a pricing header hint for the menu title
+    menu_title = "Select default model:"
+    if has_pricing:
+        # Align the header with the model column.
+        # Each choice is "  {label}" (2 spaces) and simple_term_menu prepends
+        # a 3-char cursor region ("-> " or "   "), so content starts at col 5.
+        pad = " " * 5
+        header = f"\n{pad}{'':>{name_col}} {'In':>{price_col}} {'Out':>{price_col}}"
+        if has_cache:
+            header += f" {'Cache':>{cache_col}}"
+        menu_title += header + " /Mtok"
+
     # Try arrow-key menu first, fall back to number input
     try:
         from simple_term_menu import TerminalMenu

@@ -2176,7 +2232,7 @@ def _prompt_model_selection(model_ids: List[str], current_model: str = "") -> Op
             menu_highlight_style=("fg_green",),
             cycle_cursor=True,
             clear_screen=False,
-            title="Select default model:",
+            title=menu_title,
         )
         idx = menu.show()
         if idx is None:

@@ -2192,12 +2248,13 @@ def _prompt_model_selection(model_ids: List[str], current_model: str = "") -> Op
         pass

     # Fallback: numbered list
-    print("Select default model:")
+    print(menu_title)
+    num_width = len(str(len(ordered) + 2))
     for i, mid in enumerate(ordered, 1):
-        print(f" {i}. {_label(mid)}")
+        print(f" {i:>{num_width}}. {_label(mid)}")
     n = len(ordered)
-    print(f" {n + 1}. Enter custom model name")
-    print(f" {n + 2}. Skip (keep current)")
+    print(f" {n + 1:>{num_width}}. Enter custom model name")
+    print(f" {n + 2:>{num_width}}. Skip (keep current)")
     print()

     while True:
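The aligned columns in _label() come from f-string width specifiers; a minimal sketch with made-up model prices showing the same alignment technique:

```python
# Illustrative data only — the IDs and prices are not from the live API.
ordered = ["openai/gpt-5.4-pro", "openai/gpt-5.4-nano"]
prices = {"openai/gpt-5.4-pro": ("$15.00", "$120.00"),
          "openai/gpt-5.4-nano": ("$0.15", "$0.60")}

name_col = max(len(m) for m in ordered) + 2
price_col = max(len(p) for pair in prices.values() for p in pair)

for mid in ordered:
    inp, out = prices[mid]
    # :<{name_col} left-justifies the name; :>{price_col} right-aligns the
    # prices so the decimal points line up down each column.
    print(f"{mid:<{name_col}} {inp:>{price_col}} {out:>{price_col}}")
```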
@@ -1088,10 +1088,13 @@ def _model_flow_openrouter(config, current_model=""):
         print("API key saved.")
         print()

-    from hermes_cli.models import model_ids
+    from hermes_cli.models import model_ids, get_pricing_for_provider
     openrouter_models = model_ids()

-    selected = _prompt_model_selection(openrouter_models, current_model=current_model)
+    # Fetch live pricing (non-blocking — returns empty dict on failure)
+    pricing = get_pricing_for_provider("openrouter")
+
+    selected = _prompt_model_selection(openrouter_models, current_model=current_model, pricing=pricing)
     if selected:
         _save_model_choice(selected)

@@ -1158,7 +1161,7 @@ def _model_flow_nous(config, current_model="", args=None):
     # Already logged in — use curated model list (same as OpenRouter defaults).
     # The live /models endpoint returns hundreds of models; the curated list
     # shows only agentic models users recognize from OpenRouter.
-    from hermes_cli.models import _PROVIDER_MODELS
+    from hermes_cli.models import _PROVIDER_MODELS, get_pricing_for_provider
     model_ids = _PROVIDER_MODELS.get("nous", [])
     if not model_ids:
         print("No curated models available for Nous Portal.")

@@ -1188,7 +1191,10 @@ def _model_flow_nous(config, current_model="", args=None):
         print(f"Could not verify credentials: {msg}")
         return

-    selected = _prompt_model_selection(model_ids, current_model=current_model)
+    # Fetch live pricing (non-blocking — returns empty dict on failure)
+    pricing = get_pricing_for_provider("nous")
+
+    selected = _prompt_model_selection(model_ids, current_model=current_model, pricing=pricing)
     if selected:
         _save_model_choice(selected)
         # Reactivate Nous as the provider and update config
@@ -51,6 +51,7 @@ OPENROUTER_MODELS: list[tuple[str, str]] = [
     ("nvidia/nemotron-3-super-120b-a12b", ""),
     ("nvidia/nemotron-3-super-120b-a12b:free", "free"),
     ("arcee-ai/trinity-large-preview:free", "free"),
     ("arcee-ai/trinity-large-thinking", ""),
     ("openai/gpt-5.4-pro", ""),
+    ("openai/gpt-5.4-nano", ""),
 ]

@@ -82,6 +83,7 @@ _PROVIDER_MODELS: dict[str, list[str]] = {
         "nvidia/nemotron-3-super-120b-a12b",
         "nvidia/nemotron-3-super-120b-a12b:free",
         "arcee-ai/trinity-large-preview:free",
         "arcee-ai/trinity-large-thinking",
         "openai/gpt-5.4-pro",
+        "openai/gpt-5.4-nano",
     ],
@@ -322,6 +324,213 @@ def menu_labels() -> list[str]:
     return labels


+# ---------------------------------------------------------------------------
+# Pricing helpers — fetch live pricing from OpenRouter-compatible /v1/models
+# ---------------------------------------------------------------------------
+
+# Cache: maps model_id → {"prompt": str, "completion": str} per endpoint
+_pricing_cache: dict[str, dict[str, dict[str, str]]] = {}
+
+
+def _format_price_per_mtok(per_token_str: str) -> str:
+    """Convert a per-token price string to a human-friendly $/Mtok string.
+
+    Always uses 2 decimal places so that prices align vertically when
+    right-justified in a column (the decimal point stays in the same position).
+
+    Examples:
+        "0.000003" → "$3.00" (per million tokens)
+        "0.00003" → "$30.00"
+        "0.00000015" → "$0.15"
+        "0.0000001" → "$0.10"
+        "0.00018" → "$180.00"
+        "0" → "free"
+    """
+    try:
+        val = float(per_token_str)
+    except (TypeError, ValueError):
+        return "?"
+    if val == 0:
+        return "free"
+    per_m = val * 1_000_000
+    return f"${per_m:.2f}"
+
+
+def format_pricing_label(pricing: dict[str, str] | None) -> str:
+    """Build a compact pricing label like 'in $3 · out $15 · cache $0.30/Mtok'.
+
+    Returns empty string when pricing is unavailable.
+    """
+    if not pricing:
+        return ""
+    prompt_price = pricing.get("prompt", "")
+    completion_price = pricing.get("completion", "")
+    if not prompt_price and not completion_price:
+        return ""
+    inp = _format_price_per_mtok(prompt_price)
+    out = _format_price_per_mtok(completion_price)
+    if inp == "free" and out == "free":
+        return "free"
+    cache_read = pricing.get("input_cache_read", "")
+    cache_str = _format_price_per_mtok(cache_read) if cache_read else ""
+    if inp == out and not cache_str:
+        return f"{inp}/Mtok"
+    parts = [f"in {inp}", f"out {out}"]
+    if cache_str and cache_str != "?" and cache_str != inp:
+        parts.append(f"cache {cache_str}")
+    return " · ".join(parts) + "/Mtok"
+
+
+def format_model_pricing_table(
+    models: list[tuple[str, str]],
+    pricing_map: dict[str, dict[str, str]],
+    current_model: str = "",
+    indent: str = "  ",
+) -> list[str]:
+    """Build a column-aligned model+pricing table for terminal display.
+
+    Returns a list of pre-formatted lines ready to print.
+    *models* is ``[(model_id, description), ...]``.
+    """
+    if not models:
+        return []
+
+    # Build rows: (model_id, input_price, output_price, cache_price, is_current)
+    rows: list[tuple[str, str, str, str, bool]] = []
+    has_cache = False
+    for mid, _desc in models:
+        is_cur = mid == current_model
+        p = pricing_map.get(mid)
+        if p:
+            inp = _format_price_per_mtok(p.get("prompt", ""))
+            out = _format_price_per_mtok(p.get("completion", ""))
+            cache_read = p.get("input_cache_read", "")
+            cache = _format_price_per_mtok(cache_read) if cache_read else ""
+            if cache:
+                has_cache = True
+        else:
+            inp, out, cache = "", "", ""
+        rows.append((mid, inp, out, cache, is_cur))
+
+    name_col = max(len(r[0]) for r in rows) + 2
+    # Compute price column widths from the actual data so decimals align
+    price_col = max(
+        max((len(r[1]) for r in rows if r[1]), default=4),
+        max((len(r[2]) for r in rows if r[2]), default=4),
+        3,  # minimum: "In" / "Out" header
+    )
+    cache_col = max(
+        max((len(r[3]) for r in rows if r[3]), default=4),
+        5,  # minimum: "Cache" header
+    ) if has_cache else 0
+    lines: list[str] = []
+
+    # Header
+    if has_cache:
+        lines.append(f"{indent}{'Model':<{name_col}} {'In':>{price_col}} {'Out':>{price_col}} {'Cache':>{cache_col}} /Mtok")
+        lines.append(f"{indent}{'-' * name_col} {'-' * price_col} {'-' * price_col} {'-' * cache_col}")
+    else:
+        lines.append(f"{indent}{'Model':<{name_col}} {'In':>{price_col}} {'Out':>{price_col}} /Mtok")
+        lines.append(f"{indent}{'-' * name_col} {'-' * price_col} {'-' * price_col}")
+
+    for mid, inp, out, cache, is_cur in rows:
+        marker = " ← current" if is_cur else ""
+        if has_cache:
+            lines.append(f"{indent}{mid:<{name_col}} {inp:>{price_col}} {out:>{price_col}} {cache:>{cache_col}}{marker}")
+        else:
+            lines.append(f"{indent}{mid:<{name_col}} {inp:>{price_col}} {out:>{price_col}}{marker}")
+
+    return lines
+
+
+def fetch_models_with_pricing(
+    api_key: str | None = None,
+    base_url: str = "https://openrouter.ai/api",
+    timeout: float = 8.0,
+    *,
+    force_refresh: bool = False,
+) -> dict[str, dict[str, str]]:
+    """Fetch ``/v1/models`` and return ``{model_id: {prompt, completion}}`` pricing.
+
+    Results are cached per *base_url* so repeated calls are free.
+    Works with any OpenRouter-compatible endpoint (OpenRouter, Nous Portal).
+    """
+    cache_key = (base_url or "").rstrip("/")
+    if not force_refresh and cache_key in _pricing_cache:
+        return _pricing_cache[cache_key]
+
+    url = cache_key.rstrip("/") + "/v1/models"
+    headers: dict[str, str] = {"Accept": "application/json"}
+    if api_key:
+        headers["Authorization"] = f"Bearer {api_key}"
+
+    try:
+        req = urllib.request.Request(url, headers=headers)
+        with urllib.request.urlopen(req, timeout=timeout) as resp:
+            payload = json.loads(resp.read().decode())
+    except Exception:
+        _pricing_cache[cache_key] = {}
+        return {}
+
+    result: dict[str, dict[str, str]] = {}
+    for item in payload.get("data", []):
+        mid = item.get("id")
+        pricing = item.get("pricing")
+        if mid and isinstance(pricing, dict):
+            entry: dict[str, str] = {
+                "prompt": str(pricing.get("prompt", "")),
+                "completion": str(pricing.get("completion", "")),
+            }
+            if pricing.get("input_cache_read"):
+                entry["input_cache_read"] = str(pricing["input_cache_read"])
+            if pricing.get("input_cache_write"):
+                entry["input_cache_write"] = str(pricing["input_cache_write"])
+            result[mid] = entry
+
+    _pricing_cache[cache_key] = result
+    return result
+
+
+def _resolve_openrouter_api_key() -> str:
+    """Best-effort OpenRouter API key for pricing fetch."""
+    return os.getenv("OPENROUTER_API_KEY", "").strip()
+
+
+def _resolve_nous_pricing_credentials() -> tuple[str, str]:
+    """Return ``(api_key, base_url)`` for Nous Portal pricing, or empty strings."""
+    try:
+        from hermes_cli.auth import resolve_nous_runtime_credentials
+        creds = resolve_nous_runtime_credentials()
+        if creds:
+            return (creds.get("api_key", ""), creds.get("base_url", ""))
+    except Exception:
+        pass
+    return ("", "")
+
+
+def get_pricing_for_provider(provider: str) -> dict[str, dict[str, str]]:
+    """Return live pricing for providers that support it (openrouter, nous)."""
+    normalized = normalize_provider(provider)
+    if normalized == "openrouter":
+        return fetch_models_with_pricing(
+            api_key=_resolve_openrouter_api_key(),
+            base_url="https://openrouter.ai/api",
+        )
+    if normalized == "nous":
+        api_key, base_url = _resolve_nous_pricing_credentials()
+        if base_url:
+            # Nous base_url typically looks like https://inference-api.nousresearch.com/v1
+            # We need the part before /v1 for our fetch function
+            stripped = base_url.rstrip("/")
+            if stripped.endswith("/v1"):
+                stripped = stripped[:-3]
+            return fetch_models_with_pricing(
+                api_key=api_key,
+                base_url=stripped,
+            )
+    return {}
+
+
 # All provider IDs and aliases that are valid for the provider:model syntax.
 _KNOWN_PROVIDER_NAMES: set[str] = (
     set(_PROVIDER_LABELS.keys())
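The $/Mtok conversion in _format_price_per_mtok is a multiply-by-1e6 and format; reproducing the docstring examples with a stripped-down copy of the logic:

```python
# Per-token price string → per-million-token display string.
def per_mtok(per_token: str) -> str:
    val = float(per_token)
    return "free" if val == 0 else f"${val * 1_000_000:.2f}"

assert per_mtok("0.000003") == "$3.00"      # $3 per million tokens
assert per_mtok("0.00000015") == "$0.15"
assert per_mtok("0") == "free"
```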
@@ -51,6 +51,14 @@ _CLONE_CONFIG_FILES = [
     "SOUL.md",
 ]

+# Subdirectory files copied during --clone (path relative to profile root).
+# Memory files are part of the agent's curated identity — just as important
+# as SOUL.md for continuity when cloning a profile.
+_CLONE_SUBDIR_FILES = [
+    "memories/MEMORY.md",
+    "memories/USER.md",
+]
+
 # Runtime files stripped after --clone-all (shouldn't carry over)
 _CLONE_ALL_STRIP = [
     "gateway.pid",

@@ -428,6 +436,14 @@ def create_profile(
         if src.exists():
             shutil.copy2(src, profile_dir / filename)

+    # Clone memory and other subdirectory files
+    for relpath in _CLONE_SUBDIR_FILES:
+        src = source_dir / relpath
+        if src.exists():
+            dst = profile_dir / relpath
+            dst.parent.mkdir(parents=True, exist_ok=True)
+            shutil.copy2(src, dst)
+
     return profile_dir
@@ -349,13 +349,6 @@ class SessionDB:

         self._conn.commit()

-    def close(self):
-        """Close the database connection."""
-        with self._lock:
-            if self._conn:
-                self._conn.close()
-                self._conn = None
-
     # =========================================================================
     # Session lifecycle
     # =========================================================================
@@ -32,7 +32,7 @@ from agent.memory_provider import MemoryProvider
 logger = logging.getLogger(__name__)

 # Timeouts
-_QUERY_TIMEOUT = 30  # brv query — should be fast
+_QUERY_TIMEOUT = 10  # brv query — should be fast
 _CURATE_TIMEOUT = 120  # brv curate — may involve LLM processing

 # Minimum lengths to filter noise
@@ -175,9 +175,6 @@ class ByteRoverMemoryProvider(MemoryProvider):
         self._cwd = ""
         self._session_id = ""
         self._turn_count = 0
-        self._prefetch_result = ""
-        self._prefetch_lock = threading.Lock()
-        self._prefetch_thread: Optional[threading.Thread] = None
         self._sync_thread: Optional[threading.Thread] = None

     @property
@@ -216,37 +213,26 @@ class ByteRoverMemoryProvider(MemoryProvider):
         )

     def prefetch(self, query: str, *, session_id: str = "") -> str:
-        if self._prefetch_thread and self._prefetch_thread.is_alive():
-            self._prefetch_thread.join(timeout=3.0)
-        with self._prefetch_lock:
-            result = self._prefetch_result
-            self._prefetch_result = ""
-        if not result:
-            return ""
-        return f"## ByteRover Context\n{result}"
+        """Run brv query synchronously before the agent's first LLM call.
+
+        Blocks until the query completes (up to _QUERY_TIMEOUT seconds), ensuring
+        the result is available as context before the model is called.
+        """
+        if not query or len(query.strip()) < _MIN_QUERY_LEN:
+            return ""
+        result = _run_brv(
+            ["query", "--", query.strip()[:5000]],
+            timeout=_QUERY_TIMEOUT, cwd=self._cwd,
+        )
+        if result["success"] and result.get("output"):
+            output = result["output"].strip()
+            if len(output) > _MIN_OUTPUT_LEN:
+                return f"## ByteRover Context\n{output}"
+        return ""

     def queue_prefetch(self, query: str, *, session_id: str = "") -> None:
-        if not query or len(query.strip()) < _MIN_QUERY_LEN:
-            return
-
-        def _run():
-            try:
-                result = _run_brv(
-                    ["query", "--", query.strip()[:5000]],
-                    timeout=_QUERY_TIMEOUT, cwd=self._cwd,
-                )
-                if result["success"] and result.get("output"):
-                    output = result["output"].strip()
-                    if len(output) > _MIN_OUTPUT_LEN:
-                        with self._prefetch_lock:
-                            self._prefetch_result = output
-            except Exception as e:
-                logger.debug("ByteRover prefetch failed: %s", e)
-
-        self._prefetch_thread = threading.Thread(
-            target=_run, daemon=True, name="brv-prefetch"
-        )
-        self._prefetch_thread.start()
+        """No-op: prefetch() now runs synchronously at turn start."""
+        pass

     def sync_turn(self, user_content: str, assistant_content: str, *, session_id: str = "") -> None:
         """Curate the conversation turn in background (non-blocking)."""
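_run_brv itself is outside this diff; a plausible shape, assuming it shells out to the brv CLI with a timeout (the function name and result-dict keys are inferred from the call sites above, so treat this as a sketch, not the repo's implementation):

```python
import subprocess

def _run_brv(args: list[str], timeout: int, cwd: str = "") -> dict:
    # Hypothetical: run the `brv` CLI and normalise the outcome into the
    # {"success": bool, "output": str} shape the provider checks.
    try:
        proc = subprocess.run(
            ["brv", *args], capture_output=True, text=True,
            timeout=timeout, cwd=cwd or None,
        )
        return {"success": proc.returncode == 0, "output": proc.stdout}
    except (subprocess.TimeoutExpired, OSError) as e:
        return {"success": False, "output": "", "error": str(e)}
```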
@@ -338,9 +324,8 @@ class ByteRoverMemoryProvider(MemoryProvider):
         return json.dumps({"error": f"Unknown tool: {tool_name}"})

     def shutdown(self) -> None:
-        for t in (self._sync_thread, self._prefetch_thread):
-            if t and t.is_alive():
-                t.join(timeout=10.0)
+        if self._sync_thread and self._sync_thread.is_alive():
+            self._sync_thread.join(timeout=10.0)

     # -- Tool implementations ------------------------------------------------
@@ -8,7 +8,7 @@ Original plugin by dusterbloom (PR #2351), adapted to the MemoryProvider ABC.
 Config in $HERMES_HOME/config.yaml (profile-scoped):
     plugins:
       hermes-memory-store:
-        db_path: $HERMES_HOME/memory_store.db
+        db_path: $HERMES_HOME/memory_store.db  # omit to use the default
         auto_extract: false
         default_trust: 0.5
         min_trust_threshold: 0.3
@@ -156,8 +156,15 @@ class HolographicMemoryProvider(MemoryProvider):

     def initialize(self, session_id: str, **kwargs) -> None:
         from hermes_constants import get_hermes_home
-        _default_db = str(get_hermes_home() / "memory_store.db")
+        _hermes_home = str(get_hermes_home())
+        _default_db = _hermes_home + "/memory_store.db"
         db_path = self._config.get("db_path", _default_db)
+        # Expand $HERMES_HOME in user-supplied paths so config values like
+        # "$HERMES_HOME/memory_store.db" or "~/.hermes/memory_store.db" both
+        # resolve to the active profile's directory.
+        if isinstance(db_path, str):
+            db_path = db_path.replace("$HERMES_HOME", _hermes_home)
+            db_path = db_path.replace("${HERMES_HOME}", _hermes_home)
         default_trust = float(self._config.get("default_trust", 0.5))
         hrr_dim = int(self._config.get("hrr_dim", 1024))
         hrr_weight = float(self._config.get("hrr_weight", 0.3))
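The $HERMES_HOME expansion added to initialize() is plain string substitution; a standalone check with an illustrative profile path:

```python
# Both the bare and braced env-var spellings resolve to the active profile.
_hermes_home = "/home/alice/.hermes-profiles/work"   # illustrative path
db_path = "$HERMES_HOME/memory_store.db"
db_path = db_path.replace("$HERMES_HOME", _hermes_home)
db_path = db_path.replace("${HERMES_HOME}", _hermes_home)
assert db_path == "/home/alice/.hermes-profiles/work/memory_store.db"
```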
@@ -227,16 +227,19 @@ class TestIncomingDocumentHandling:
         adapter.handle_message.assert_called_once()

     @pytest.mark.asyncio
-    async def test_unsupported_type_skipped(self, adapter):
-        """An unsupported file type (.zip) should be skipped silently."""
+    async def test_zip_document_cached(self, adapter):
+        """A .zip file should be cached as a supported document."""
         msg = make_message([
             make_attachment(filename="archive.zip", content_type="application/zip")
         ])
-        await adapter._handle_message(msg)
+
+        with _mock_aiohttp_download(b"PK\x03\x04test"):
+            await adapter._handle_message(msg)

         event = adapter.handle_message.call_args[0][0]
-        assert event.media_urls == []
-        assert event.message_type == MessageType.TEXT
+        assert len(event.media_urls) == 1
+        assert event.media_types == ["application/zip"]
+        assert event.message_type == MessageType.DOCUMENT

     @pytest.mark.asyncio
     async def test_download_error_handled(self, adapter):
@@ -151,7 +151,7 @@ class TestSupportedDocumentTypes:

     @pytest.mark.parametrize(
         "ext",
-        [".pdf", ".md", ".txt", ".docx", ".xlsx", ".pptx"],
+        [".pdf", ".md", ".txt", ".zip", ".docx", ".xlsx", ".pptx"],
     )
     def test_expected_extensions_present(self, ext):
         assert ext in SUPPORTED_DOCUMENT_TYPES
@@ -95,7 +95,7 @@ class TestMemoryInjection:
         with (
             patch("gateway.run._resolve_runtime_agent_kwargs", return_value={"api_key": "k"}),
             patch("gateway.run._resolve_gateway_model", return_value="test-model"),
-            patch.dict("sys.modules", {"tools.memory_tool": MagicMock(MEMORY_DIR=memory_dir)}),
+            patch.dict("sys.modules", {"tools.memory_tool": MagicMock(get_memory_dir=lambda: memory_dir)}),
         ):
             runner._flush_memories_for_session("session_123")

@@ -119,7 +119,7 @@ class TestMemoryInjection:
         with (
             patch("gateway.run._resolve_runtime_agent_kwargs", return_value={"api_key": "k"}),
             patch("gateway.run._resolve_gateway_model", return_value="test-model"),
-            patch.dict("sys.modules", {"tools.memory_tool": MagicMock(MEMORY_DIR=empty_dir)}),
+            patch.dict("sys.modules", {"tools.memory_tool": MagicMock(get_memory_dir=lambda: empty_dir)}),
         ):
             runner._flush_memories_for_session("session_456")

@@ -140,7 +140,7 @@ class TestMemoryInjection:
         with (
             patch("gateway.run._resolve_runtime_agent_kwargs", return_value={"api_key": "k"}),
             patch("gateway.run._resolve_gateway_model", return_value="test-model"),
-            patch.dict("sys.modules", {"tools.memory_tool": MagicMock(MEMORY_DIR=memory_dir)}),
+            patch.dict("sys.modules", {"tools.memory_tool": MagicMock(get_memory_dir=lambda: memory_dir)}),
         ):
             runner._flush_memories_for_session("session_789")

@@ -171,7 +171,7 @@ class TestFlushAgentSilenced:
         with (
             patch("gateway.run._resolve_runtime_agent_kwargs", return_value={"api_key": "k"}),
             patch("gateway.run._resolve_gateway_model", return_value="test-model"),
-            patch.dict("sys.modules", {"tools.memory_tool": MagicMock(MEMORY_DIR=tmp_path)}),
+            patch.dict("sys.modules", {"tools.memory_tool": MagicMock(get_memory_dir=lambda: tmp_path)}),
         ):
             runner._flush_memories_for_session("session_silent")

@@ -213,7 +213,7 @@ class TestFlushPromptStructure:
         with (
             patch("gateway.run._resolve_runtime_agent_kwargs", return_value={"api_key": "k"}),
             patch("gateway.run._resolve_gateway_model", return_value="test-model"),
-            patch.dict("sys.modules", {"tools.memory_tool": MagicMock(MEMORY_DIR=Path("/nonexistent"))}),
+            patch.dict("sys.modules", {"tools.memory_tool": MagicMock(get_memory_dir=lambda: Path("/nonexistent"))}),
         ):
             runner._flush_memories_for_session("session_struct")
@@ -408,19 +408,22 @@ class TestIncomingDocumentHandling:
         assert "[Content of" not in (msg_event.text or "")

     @pytest.mark.asyncio
-    async def test_unsupported_file_type_skipped(self, adapter):
-        """A .zip file should be silently skipped."""
-        event = self._make_event(files=[{
-            "mimetype": "application/zip",
-            "name": "archive.zip",
-            "url_private_download": "https://files.slack.com/archive.zip",
-            "size": 1024,
-        }])
-        await adapter._handle_slack_message(event)
+    async def test_zip_file_cached(self, adapter):
+        """A .zip file should be cached as a supported document."""
+        with patch.object(adapter, "_download_slack_file_bytes", new_callable=AsyncMock) as dl:
+            dl.return_value = b"PK\x03\x04zip"
+            event = self._make_event(files=[{
+                "mimetype": "application/zip",
+                "name": "archive.zip",
+                "url_private_download": "https://files.slack.com/archive.zip",
+                "size": 1024,
+            }])
+            await adapter._handle_slack_message(event)

         msg_event = adapter.handle_message.call_args[0][0]
-        assert msg_event.message_type == MessageType.TEXT
-        assert len(msg_event.media_urls) == 0
+        assert msg_event.message_type == MessageType.DOCUMENT
+        assert len(msg_event.media_urls) == 1
+        assert msg_event.media_types == ["application/zip"]

     @pytest.mark.asyncio
     async def test_oversized_document_skipped(self, adapter):
@@ -236,15 +236,16 @@ class TestDocumentDownloadBlock:
         assert "Please summarize" in event.text

     @pytest.mark.asyncio
-    async def test_unsupported_type_rejected(self, adapter):
+    async def test_zip_document_cached(self, adapter):
+        """A .zip upload should be cached as a supported document."""
         doc = _make_document(file_name="archive.zip", mime_type="application/zip", file_size=100)
         msg = _make_message(document=doc)
         update = _make_update(msg)

         await adapter._handle_media_message(update, MagicMock())
         event = adapter.handle_message.call_args[0][0]
-        assert "Unsupported document type" in event.text
-        assert ".zip" in event.text
+        assert event.media_urls and event.media_urls[0].endswith("archive.zip")
+        assert event.media_types == ["application/zip"]

     @pytest.mark.asyncio
     async def test_oversized_file_rejected(self, adapter):
@@ -330,7 +330,7 @@ def test_model_flow_nous_prints_subscription_guidance_without_mutating_explicit_
         "hermes_cli.auth.fetch_nous_models",
         lambda *args, **kwargs: ["claude-opus-4-6"],
     )
-    monkeypatch.setattr("hermes_cli.auth._prompt_model_selection", lambda model_ids, current_model="": "claude-opus-4-6")
+    monkeypatch.setattr("hermes_cli.auth._prompt_model_selection", lambda model_ids, current_model="", pricing=None: "claude-opus-4-6")
     monkeypatch.setattr("hermes_cli.auth._save_model_choice", lambda model: None)
     monkeypatch.setattr("hermes_cli.auth._update_config_for_provider", lambda provider, url: None)
     monkeypatch.setattr(

@@ -368,7 +368,7 @@ def test_model_flow_nous_applies_managed_tts_default_when_unconfigured(monkeypat
         "hermes_cli.auth.fetch_nous_models",
         lambda *args, **kwargs: ["claude-opus-4-6"],
     )
-    monkeypatch.setattr("hermes_cli.auth._prompt_model_selection", lambda model_ids, current_model="": "claude-opus-4-6")
+    monkeypatch.setattr("hermes_cli.auth._prompt_model_selection", lambda model_ids, current_model="", pricing=None: "claude-opus-4-6")
     monkeypatch.setattr("hermes_cli.auth._save_model_choice", lambda model: None)
     monkeypatch.setattr("hermes_cli.auth._update_config_for_provider", lambda provider, url: None)
     monkeypatch.setattr(
@@ -10,7 +10,9 @@ import pytest
 from tools.credential_files import (
     clear_credential_files,
     get_credential_file_mounts,
+    get_cache_directory_mounts,
     get_skills_directory_mount,
+    iter_cache_files,
     iter_skills_files,
     register_credential_file,
     register_credential_files,
@@ -358,3 +360,116 @@ class TestConfigPathTraversal:
         mounts = get_credential_file_mounts()
         assert len(mounts) == 1
         assert "oauth.json" in mounts[0]["container_path"]
+
+
+# ---------------------------------------------------------------------------
+# Cache directory mounts
+# ---------------------------------------------------------------------------
+
+class TestCacheDirectoryMounts:
+    """Tests for get_cache_directory_mounts() and iter_cache_files()."""
+
+    def test_returns_existing_cache_dirs(self, tmp_path, monkeypatch):
+        """Existing cache dirs are returned with correct container paths."""
+        hermes_home = tmp_path / ".hermes"
+        hermes_home.mkdir()
+        (hermes_home / "cache" / "documents").mkdir(parents=True)
+        (hermes_home / "cache" / "audio").mkdir(parents=True)
+        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
+
+        mounts = get_cache_directory_mounts()
+        paths = {m["container_path"] for m in mounts}
+        assert "/root/.hermes/cache/documents" in paths
+        assert "/root/.hermes/cache/audio" in paths
+
+    def test_skips_nonexistent_dirs(self, tmp_path, monkeypatch):
+        """Dirs that don't exist on disk are not returned."""
+        hermes_home = tmp_path / ".hermes"
+        hermes_home.mkdir()
+        # Create only one cache dir
+        (hermes_home / "cache" / "documents").mkdir(parents=True)
+        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
+
+        mounts = get_cache_directory_mounts()
+        assert len(mounts) == 1
+        assert mounts[0]["container_path"] == "/root/.hermes/cache/documents"
+
+    def test_legacy_dir_names_resolved(self, tmp_path, monkeypatch):
+        """Old-style dir names (e.g. document_cache) are resolved correctly."""
+        hermes_home = tmp_path / ".hermes"
+        hermes_home.mkdir()
+        # Use legacy dir name — get_hermes_dir prefers old if it exists
+        (hermes_home / "document_cache").mkdir()
+        (hermes_home / "image_cache").mkdir()
+        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
+
+        mounts = get_cache_directory_mounts()
+        host_paths = {m["host_path"] for m in mounts}
+        assert str(hermes_home / "document_cache") in host_paths
+        assert str(hermes_home / "image_cache") in host_paths
+        # Container paths always use the new layout
+        container_paths = {m["container_path"] for m in mounts}
+        assert "/root/.hermes/cache/documents" in container_paths
+        assert "/root/.hermes/cache/images" in container_paths
+
+    def test_empty_hermes_home(self, tmp_path, monkeypatch):
+        """No cache dirs → empty list."""
+        hermes_home = tmp_path / ".hermes"
+        hermes_home.mkdir()
+        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
+
+        assert get_cache_directory_mounts() == []
+
+
+class TestIterCacheFiles:
+    """Tests for iter_cache_files()."""
+
+    def test_enumerates_files(self, tmp_path, monkeypatch):
+        """Regular files in cache dirs are returned."""
+        hermes_home = tmp_path / ".hermes"
+        doc_dir = hermes_home / "cache" / "documents"
+        doc_dir.mkdir(parents=True)
+        (doc_dir / "upload.zip").write_bytes(b"PK\x03\x04")
+        (doc_dir / "report.pdf").write_bytes(b"%PDF-1.4")
+        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
+
+        entries = iter_cache_files()
+        names = {Path(e["container_path"]).name for e in entries}
+        assert "upload.zip" in names
+        assert "report.pdf" in names
+
+    def test_skips_symlinks(self, tmp_path, monkeypatch):
+        """Symlinks inside cache dirs are skipped."""
+        hermes_home = tmp_path / ".hermes"
+        doc_dir = hermes_home / "cache" / "documents"
+        doc_dir.mkdir(parents=True)
+        real_file = doc_dir / "real.txt"
+        real_file.write_text("content")
+        (doc_dir / "link.txt").symlink_to(real_file)
+        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
+
+        entries = iter_cache_files()
+        names = [Path(e["container_path"]).name for e in entries]
+        assert "real.txt" in names
+        assert "link.txt" not in names
+
+    def test_nested_files(self, tmp_path, monkeypatch):
+        """Files in subdirectories are included with correct relative paths."""
+        hermes_home = tmp_path / ".hermes"
+        ss_dir = hermes_home / "cache" / "screenshots"
+        sub = ss_dir / "session_abc"
+        sub.mkdir(parents=True)
+        (sub / "screen1.png").write_bytes(b"PNG")
+        monkeypatch.setenv("HERMES_HOME", str(hermes_home))

+        entries = iter_cache_files()
+        assert len(entries) == 1
+        assert entries[0]["container_path"] == "/root/.hermes/cache/screenshots/session_abc/screen1.png"
+
+    def test_empty_cache(self, tmp_path, monkeypatch):
+        """No cache dirs → empty list."""
+        hermes_home = tmp_path / ".hermes"
+        hermes_home.mkdir()
+        monkeypatch.setenv("HERMES_HOME", str(hermes_home))
+
+        assert iter_cache_files() == []
@@ -93,6 +93,7 @@ class TestScanMemoryContent:
 def store(tmp_path, monkeypatch):
     """Create a MemoryStore with temp storage."""
     monkeypatch.setattr("tools.memory_tool.MEMORY_DIR", tmp_path)
+    monkeypatch.setattr("tools.memory_tool.get_memory_dir", lambda: tmp_path)
     s = MemoryStore(memory_char_limit=500, user_char_limit=300)
     s.load_from_disk()
     return s

@@ -186,6 +187,7 @@ class TestMemoryStoreRemove:
 class TestMemoryStorePersistence:
     def test_save_and_load_roundtrip(self, tmp_path, monkeypatch):
         monkeypatch.setattr("tools.memory_tool.MEMORY_DIR", tmp_path)
+        monkeypatch.setattr("tools.memory_tool.get_memory_dir", lambda: tmp_path)

         store1 = MemoryStore()
         store1.load_from_disk()

@@ -199,6 +201,7 @@ class TestMemoryStorePersistence:

     def test_deduplication_on_load(self, tmp_path, monkeypatch):
         monkeypatch.setattr("tools.memory_tool.MEMORY_DIR", tmp_path)
+        monkeypatch.setattr("tools.memory_tool.get_memory_dir", lambda: tmp_path)
         # Write file with duplicates
         mem_file = tmp_path / "MEMORY.md"
         mem_file.write_text("duplicate entry\n§\nduplicate entry\n§\nunique entry")
@@ -65,6 +65,7 @@ import requests
 from typing import Dict, Any, Optional, List
 from pathlib import Path
 from agent.auxiliary_client import call_llm
+from hermes_constants import get_hermes_home

 try:
     from tools.website_policy import check_website_access

@@ -144,7 +145,7 @@ def _get_command_timeout() -> int:
     ``DEFAULT_COMMAND_TIMEOUT`` (30s) if unset or unreadable.
     """
     try:
-        hermes_home = Path(os.environ.get("HERMES_HOME", Path.home() / ".hermes"))
+        hermes_home = get_hermes_home()
         config_path = hermes_home / "config.yaml"
         if config_path.exists():
             import yaml

@@ -256,7 +257,7 @@ def _get_cloud_provider() -> Optional[CloudBrowserProvider]:

     _cloud_provider_resolved = True
     try:
-        hermes_home = Path(os.environ.get("HERMES_HOME", Path.home() / ".hermes"))
+        hermes_home = get_hermes_home()
         config_path = hermes_home / "config.yaml"
         if config_path.exists():
             import yaml

@@ -327,7 +328,7 @@ def _allow_private_urls() -> bool:
     _allow_private_urls_resolved = True
     _cached_allow_private_urls = False  # safe default
     try:
-        hermes_home = Path(os.environ.get("HERMES_HOME", Path.home() / ".hermes"))
+        hermes_home = get_hermes_home()
         config_path = hermes_home / "config.yaml"
         if config_path.exists():
             import yaml

@@ -777,7 +778,7 @@ def _find_agent_browser() -> str:
             extra_dirs.append(d)
     extra_dirs.extend(_discover_homebrew_node_dirs())

-    hermes_home = Path(os.environ.get("HERMES_HOME", Path.home() / ".hermes"))
+    hermes_home = get_hermes_home()
     hermes_node_bin = str(hermes_home / "node" / "bin")
     if os.path.isdir(hermes_node_bin):
         extra_dirs.append(hermes_node_bin)

@@ -904,7 +905,7 @@ def _run_browser_command(

     # Ensure PATH includes Hermes-managed Node first, Homebrew versioned
     # node dirs (for macOS ``brew install node@24``), then standard system dirs.
-    hermes_home = Path(os.environ.get("HERMES_HOME", Path.home() / ".hermes"))
+    hermes_home = get_hermes_home()
     hermes_node_bin = str(hermes_home / "node" / "bin")

     existing_path = browser_env.get("PATH", "")

@@ -1541,7 +1542,7 @@ def _maybe_start_recording(task_id: str):
     if task_id in _recording_sessions:
         return
     try:
-        hermes_home = Path(os.environ.get("HERMES_HOME", Path.home() / ".hermes"))
+        hermes_home = get_hermes_home()
         config_path = hermes_home / "config.yaml"
         record_enabled = False
         if config_path.exists():

@@ -1830,7 +1831,7 @@ def _cleanup_old_recordings(max_age_hours=72):
     """Remove browser recordings older than max_age_hours to prevent disk bloat."""
     import time
     try:
-        hermes_home = Path(os.environ.get("HERMES_HOME", Path.home() / ".hermes"))
+        hermes_home = get_hermes_home()
         recordings_dir = hermes_home / "browser_recordings"
         if not recordings_dir.exists():
             return
@@ -1,29 +1,21 @@
-"""Credential file passthrough registry for remote terminal backends.
+"""File passthrough registry for remote terminal backends.

-Skills that declare ``required_credential_files`` in their frontmatter need
-those files available inside sandboxed execution environments (Modal, Docker).
-By default remote backends create bare containers with no host files.
+Remote backends (Docker, Modal, SSH) create sandboxes with no host files.
+This module ensures that credential files, skill directories, and host-side
+cache directories (documents, images, audio, screenshots) are mounted or
+synced into those sandboxes so the agent can access them.

-This module provides a session-scoped registry so skill-declared credential
-files (and user-configured overrides) are mounted into remote sandboxes.
+**Credentials and skills** — session-scoped registry fed by skill declarations
+(``required_credential_files``) and user config (``terminal.credential_files``).

-Two sources feed the registry:
+**Cache directories** — gateway-cached uploads, browser screenshots, TTS
+audio, and processed images. Mounted read-only so the remote terminal can
+reference files the host side created (e.g. ``unzip`` an uploaded archive).

-1. **Skill declarations** — when a skill is loaded via ``skill_view``, its
-   ``required_credential_files`` entries are registered here if the files
-   exist on the host.
-2. **User config** — ``terminal.credential_files`` in config.yaml lets users
-   explicitly list additional files to mount.
-
-Remote backends (``tools/environments/modal.py``, ``docker.py``) call
-:func:`get_credential_file_mounts` at sandbox creation time.
-
-Each registered entry is a dict::
-
-    {
-        "host_path": "/home/user/.hermes/google_token.json",
-        "container_path": "/root/.hermes/google_token.json",
-    }
+Remote backends call :func:`get_credential_file_mounts`,
+:func:`get_skills_directory_mount` / :func:`iter_skills_files`, and
+:func:`get_cache_directory_mounts` / :func:`iter_cache_files` at sandbox
+creation time and before each command (for resync on Modal).
 """

 from __future__ import annotations
@@ -300,6 +292,71 @@ def iter_skills_files(
     return result


+# ---------------------------------------------------------------------------
+# Cache directory mounts (documents, images, audio, screenshots)
+# ---------------------------------------------------------------------------
+
+# The four cache subdirectories that should be mirrored into remote backends.
+# Each tuple is (new_subpath, old_name) matching hermes_constants.get_hermes_dir().
+_CACHE_DIRS: list[tuple[str, str]] = [
+    ("cache/documents", "document_cache"),
+    ("cache/images", "image_cache"),
+    ("cache/audio", "audio_cache"),
+    ("cache/screenshots", "browser_screenshots"),
+]
+
+
+def get_cache_directory_mounts(
+    container_base: str = "/root/.hermes",
+) -> List[Dict[str, str]]:
+    """Return mount entries for each cache directory that exists on disk.
+
+    Used by Docker to create bind mounts. Each entry has ``host_path`` and
+    ``container_path`` keys. The host path is resolved via
+    ``get_hermes_dir()`` for backward compatibility with old directory layouts.
+    """
+    from hermes_constants import get_hermes_dir
+
+    mounts: List[Dict[str, str]] = []
+    for new_subpath, old_name in _CACHE_DIRS:
+        host_dir = get_hermes_dir(new_subpath, old_name)
+        if host_dir.is_dir():
+            # Always map to the *new* container layout regardless of host layout.
+            container_path = f"{container_base.rstrip('/')}/{new_subpath}"
+            mounts.append({
+                "host_path": str(host_dir),
+                "container_path": container_path,
+            })
+    return mounts
+
+
+def iter_cache_files(
+    container_base: str = "/root/.hermes",
+) -> List[Dict[str, str]]:
+    """Return individual (host_path, container_path) entries for cache files.
+
+    Used by Modal to upload files individually and resync before each command.
+    Skips symlinks. The container paths use the new ``cache/<subdir>`` layout.
+    """
+    from hermes_constants import get_hermes_dir
+
+    result: List[Dict[str, str]] = []
+    for new_subpath, old_name in _CACHE_DIRS:
+        host_dir = get_hermes_dir(new_subpath, old_name)
+        if not host_dir.is_dir():
+            continue
+        container_root = f"{container_base.rstrip('/')}/{new_subpath}"
+        for item in host_dir.rglob("*"):
+            if item.is_symlink() or not item.is_file():
+                continue
+            rel = item.relative_to(host_dir)
+            result.append({
+                "host_path": str(item),
+                "container_path": f"{container_root}/{rel}",
+            })
+    return result
+
+
 def clear_credential_files() -> None:
     """Reset the skill-scoped registry (e.g. on session reset)."""
     _registered_files.clear()
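A detail worth noting in iter_cache_files(): Path.is_file() follows symlinks, so the is_symlink() check must run first or a symlink pointing at a regular file would slip through. The pattern in isolation:

```python
from pathlib import Path

def safe_files(root: Path) -> list[Path]:
    # is_symlink() is checked before is_file(), which follows symlinks —
    # this excludes symlinks even when their targets are regular files.
    return [p for p in root.rglob("*") if not p.is_symlink() and p.is_file()]
```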
@@ -563,7 +563,7 @@ def delegate_task(
     if parent_agent and hasattr(parent_agent, '_memory_manager') and parent_agent._memory_manager:
         for entry in results:
             try:
-                _task_goal = tasks[entry["task_index"]]["goal"] if entry["task_index"] < len(tasks) else ""
+                _task_goal = task_list[entry["task_index"]]["goal"] if entry["task_index"] < len(task_list) else ""
                 parent_agent._memory_manager.on_delegation(
                     task=_task_goal,
                     result=entry.get("summary", "") or "",
@@ -315,7 +315,11 @@ class DockerEnvironment(BaseEnvironment):
         # Mount credential files (OAuth tokens, etc.) declared by skills.
         # Read-only so the container can authenticate but not modify host creds.
         try:
-            from tools.credential_files import get_credential_file_mounts, get_skills_directory_mount
+            from tools.credential_files import (
+                get_credential_file_mounts,
+                get_skills_directory_mount,
+                get_cache_directory_mounts,
+            )

             for mount_entry in get_credential_file_mounts():
                 volume_args.extend([

@@ -341,6 +345,21 @@ class DockerEnvironment(BaseEnvironment):
                     skills_mount["host_path"],
                     skills_mount["container_path"],
                 )

+            # Mount host-side cache directories (documents, images, audio,
+            # screenshots) so the agent can access uploaded files and other
+            # cached media from inside the container. Read-only — the
+            # container reads these but the host gateway manages writes.
+            for cache_mount in get_cache_directory_mounts():
+                volume_args.extend([
+                    "-v",
+                    f"{cache_mount['host_path']}:{cache_mount['container_path']}:ro",
+                ])
+                logger.info(
+                    "Docker: mounting cache dir %s -> %s",
+                    cache_mount["host_path"],
+                    cache_mount["container_path"],
+                )
         except Exception as e:
             logger.debug("Docker: could not load credential file mounts: %s", e)
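For reference, this is how one of the returned mount entries expands on the docker run command line (paths illustrative):

```python
# One cache mount entry → one "-v host:container:ro" argument pair.
cache_mount = {"host_path": "/home/alice/.hermes/cache/documents",
               "container_path": "/root/.hermes/cache/documents"}
volume_args = ["-v", f"{cache_mount['host_path']}:{cache_mount['container_path']}:ro"]
# docker run ... -v /home/alice/.hermes/cache/documents:/root/.hermes/cache/documents:ro <image>
```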
@@ -186,7 +186,11 @@ class ModalEnvironment(BaseModalExecutionEnvironment):

         cred_mounts = []
         try:
-            from tools.credential_files import get_credential_file_mounts, iter_skills_files
+            from tools.credential_files import (
+                get_credential_file_mounts,
+                iter_skills_files,
+                iter_cache_files,
+            )

             for mount_entry in get_credential_file_mounts():
                 cred_mounts.append(

@@ -212,6 +216,20 @@ class ModalEnvironment(BaseModalExecutionEnvironment):
             )
             if skills_files:
                 logger.info("Modal: mounting %d skill files", len(skills_files))

+            # Mount host-side cache files (documents, images, audio,
+            # screenshots). New files arriving mid-session are picked up
+            # by _sync_files() before each command execution.
+            cache_files = iter_cache_files()
+            for entry in cache_files:
+                cred_mounts.append(
+                    _modal.Mount.from_local_file(
+                        entry["host_path"],
+                        remote_path=entry["container_path"],
+                    )
+                )
+            if cache_files:
+                logger.info("Modal: mounting %d cache files", len(cache_files))
         except Exception as e:
             logger.debug("Modal: could not load credential file mounts: %s", e)
@@ -308,13 +326,19 @@ class ModalEnvironment(BaseModalExecutionEnvironment):
         return True

     def _sync_files(self) -> None:
-        """Push credential files and skill files into the running sandbox.
+        """Push credential, skill, and cache files into the running sandbox.

         Runs before each command. Uses mtime+size caching so only changed
-        files are pushed (~13μs overhead in the no-op case).
+        files are pushed (~13μs overhead in the no-op case). Cache files
+        are especially important here — new uploads/screenshots may appear
+        mid-session after sandbox creation.
         """
         try:
-            from tools.credential_files import get_credential_file_mounts, iter_skills_files
+            from tools.credential_files import (
+                get_credential_file_mounts,
+                iter_skills_files,
+                iter_cache_files,
+            )

             for entry in get_credential_file_mounts():
                 if self._push_file_to_sandbox(entry["host_path"], entry["container_path"]):

@@ -323,6 +347,10 @@ class ModalEnvironment(BaseModalExecutionEnvironment):
             for entry in iter_skills_files():
                 if self._push_file_to_sandbox(entry["host_path"], entry["container_path"]):
                     logger.debug("Modal: synced skill file %s", entry["container_path"])

+            for entry in iter_cache_files():
+                if self._push_file_to_sandbox(entry["host_path"], entry["container_path"]):
+                    logger.debug("Modal: synced cache file %s", entry["container_path"])
         except Exception as e:
             logger.debug("Modal: file sync failed: %s", e)
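_push_file_to_sandbox's change detection is not part of this diff; a plausible mtime+size cache consistent with the "~13μs overhead in the no-op case" note (one os.stat per unchanged file, no upload — the helper name and cache shape below are assumptions):

```python
import os

_seen: dict[str, tuple[float, int]] = {}

def _changed(host_path: str) -> bool:
    # Hypothetical sketch: a file is "changed" when its (mtime, size)
    # signature differs from the last push; unchanged files cost one stat.
    st = os.stat(host_path)
    sig = (st.st_mtime, st.st_size)
    if _seen.get(host_path) == sig:
        return False
    _seen[host_path] = sig
    return True
```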
@@ -36,8 +36,18 @@ from typing import Dict, Any, List, Optional

 logger = logging.getLogger(__name__)

-# Where memory files live
-MEMORY_DIR = get_hermes_home() / "memories"
+# Where memory files live — resolved dynamically so profile overrides
+# (HERMES_HOME env var changes) are always respected. The old module-level
+# constant was cached at import time and could go stale if a profile switch
+# happened after the first import.
+def get_memory_dir() -> Path:
+    """Return the profile-scoped memories directory."""
+    return get_hermes_home() / "memories"
+
+# Backward-compatible alias — gateway/run.py imports this at runtime inside
+# a function body, so it gets the correct snapshot for that process. New code
+# should prefer get_memory_dir().
+MEMORY_DIR = get_memory_dir()

 ENTRY_DELIMITER = "\n§\n"

@@ -108,10 +118,11 @@ class MemoryStore:

     def load_from_disk(self):
         """Load entries from MEMORY.md and USER.md, capture system prompt snapshot."""
-        MEMORY_DIR.mkdir(parents=True, exist_ok=True)
+        mem_dir = get_memory_dir()
+        mem_dir.mkdir(parents=True, exist_ok=True)

-        self.memory_entries = self._read_file(MEMORY_DIR / "MEMORY.md")
-        self.user_entries = self._read_file(MEMORY_DIR / "USER.md")
+        self.memory_entries = self._read_file(mem_dir / "MEMORY.md")
+        self.user_entries = self._read_file(mem_dir / "USER.md")

         # Deduplicate entries (preserves order, keeps first occurrence)
         self.memory_entries = list(dict.fromkeys(self.memory_entries))

@@ -143,9 +154,10 @@ class MemoryStore:

     @staticmethod
     def _path_for(target: str) -> Path:
+        mem_dir = get_memory_dir()
         if target == "user":
-            return MEMORY_DIR / "USER.md"
-        return MEMORY_DIR / "MEMORY.md"
+            return mem_dir / "USER.md"
+        return mem_dir / "MEMORY.md"

     def _reload_target(self, target: str):
         """Re-read entries from disk into in-memory state.

@@ -158,7 +170,7 @@ class MemoryStore:

     def save_to_disk(self, target: str):
         """Persist entries to the appropriate file. Called after every mutation."""
-        MEMORY_DIR.mkdir(parents=True, exist_ok=True)
+        get_memory_dir().mkdir(parents=True, exist_ok=True)
         self._write_file(self._path_for(target), self._entries_for(target))

     def _entries_for(self, target: str) -> List[str]:
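Why the import-time constant could go stale — a minimal reproduction, assuming get_hermes_home() falls back to reading HERMES_HOME from the environment as other call sites in this changeset suggest:

```python
import os
from pathlib import Path

def get_hermes_home() -> Path:
    # Assumed behaviour: profile selection via the HERMES_HOME env var.
    return Path(os.environ.get("HERMES_HOME", Path.home() / ".hermes"))

os.environ["HERMES_HOME"] = "/tmp/profile-a"
MEMORY_DIR = get_hermes_home() / "memories"   # snapshot taken "at import"

os.environ["HERMES_HOME"] = "/tmp/profile-b"  # profile switch after import
assert MEMORY_DIR == Path("/tmp/profile-a/memories")                      # stale
assert get_hermes_home() / "memories" == Path("/tmp/profile-b/memories")  # fresh
```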
@@ -788,6 +788,15 @@ Create a single, unified markdown summary."""
         logger.warning("Synthesis LLM returned empty content, retrying once")
         response = await async_call_llm(**call_kwargs)
         final_summary = extract_content_or_reasoning(response)

+    # If still None after retry, fall back to concatenated summaries
+    if not final_summary:
+        logger.warning("Synthesis failed after retry — concatenating chunk summaries")
+        fallback = "\n\n".join(summaries)
+        if len(fallback) > max_output_size:
+            fallback = fallback[:max_output_size] + "\n\n[... truncated ...]"
+        return fallback
+
     # Enforce hard cap
     if len(final_summary) > max_output_size:
         final_summary = final_summary[:max_output_size] + "\n\n[... summary truncated for context management ...]"
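The fallback path in miniature, with illustrative sizes (the real cap is max_output_size):

```python
# Concatenate chunk summaries, then enforce the same hard cap applied to a
# successful synthesis.
summaries = ["chunk one summary", "chunk two summary"]
max_output_size = 20  # deliberately tiny to trigger truncation
fallback = "\n\n".join(summaries)
if len(fallback) > max_output_size:
    fallback = fallback[:max_output_size] + "\n\n[... truncated ...]"
assert fallback.endswith("[... truncated ...]")
```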
uv.lock (generated, 90 changed lines)
@@ -1017,6 +1017,31 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/c6/45/e6dd0c6c740c67c07474f2eb5175bb5656598488db444c4abd2a4e948393/daytona_toolbox_api_client_async-0.155.0-py3-none-any.whl", hash = "sha256:6ecf6351a31686d8e33ff054db69e279c45b574018b6c9a1cae15a7940412951", size = 176355, upload-time = "2026-03-24T14:47:36.327Z" },
]

[[package]]
name = "debugpy"
version = "1.8.20"
source = { registry = "https://pypi.org/simple" }
sdist = { url = "https://files.pythonhosted.org/packages/e0/b7/cd8080344452e4874aae67c40d8940e2b4d47b01601a8fd9f44786c757c7/debugpy-1.8.20.tar.gz", hash = "sha256:55bc8701714969f1ab89a6d5f2f3d40c36f91b2cbe2f65d98bf8196f6a6a2c33", size = 1645207, upload-time = "2026-01-29T23:03:28.199Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/51/56/c3baf5cbe4dd77427fd9aef99fcdade259ad128feeb8a786c246adb838e5/debugpy-1.8.20-cp311-cp311-macosx_15_0_universal2.whl", hash = "sha256:eada6042ad88fa1571b74bd5402ee8b86eded7a8f7b827849761700aff171f1b", size = 2208318, upload-time = "2026-01-29T23:03:36.481Z" },
{ url = "https://files.pythonhosted.org/packages/9a/7d/4fa79a57a8e69fe0d9763e98d1110320f9ecd7f1f362572e3aafd7417c9d/debugpy-1.8.20-cp311-cp311-manylinux_2_34_x86_64.whl", hash = "sha256:7de0b7dfeedc504421032afba845ae2a7bcc32ddfb07dae2c3ca5442f821c344", size = 3171493, upload-time = "2026-01-29T23:03:37.775Z" },
{ url = "https://files.pythonhosted.org/packages/7d/f2/1e8f8affe51e12a26f3a8a8a4277d6e60aa89d0a66512f63b1e799d424a4/debugpy-1.8.20-cp311-cp311-win32.whl", hash = "sha256:773e839380cf459caf73cc533ea45ec2737a5cc184cf1b3b796cd4fd98504fec", size = 5209240, upload-time = "2026-01-29T23:03:39.109Z" },
{ url = "https://files.pythonhosted.org/packages/d5/92/1cb532e88560cbee973396254b21bece8c5d7c2ece958a67afa08c9f10dc/debugpy-1.8.20-cp311-cp311-win_amd64.whl", hash = "sha256:1f7650546e0eded1902d0f6af28f787fa1f1dbdbc97ddabaf1cd963a405930cb", size = 5233481, upload-time = "2026-01-29T23:03:40.659Z" },
{ url = "https://files.pythonhosted.org/packages/14/57/7f34f4736bfb6e00f2e4c96351b07805d83c9a7b33d28580ae01374430f7/debugpy-1.8.20-cp312-cp312-macosx_15_0_universal2.whl", hash = "sha256:4ae3135e2089905a916909ef31922b2d733d756f66d87345b3e5e52b7a55f13d", size = 2550686, upload-time = "2026-01-29T23:03:42.023Z" },
{ url = "https://files.pythonhosted.org/packages/ab/78/b193a3975ca34458f6f0e24aaf5c3e3da72f5401f6054c0dfd004b41726f/debugpy-1.8.20-cp312-cp312-manylinux_2_34_x86_64.whl", hash = "sha256:88f47850a4284b88bd2bfee1f26132147d5d504e4e86c22485dfa44b97e19b4b", size = 4310588, upload-time = "2026-01-29T23:03:43.314Z" },
{ url = "https://files.pythonhosted.org/packages/c1/55/f14deb95eaf4f30f07ef4b90a8590fc05d9e04df85ee379712f6fb6736d7/debugpy-1.8.20-cp312-cp312-win32.whl", hash = "sha256:4057ac68f892064e5f98209ab582abfee3b543fb55d2e87610ddc133a954d390", size = 5331372, upload-time = "2026-01-29T23:03:45.526Z" },
{ url = "https://files.pythonhosted.org/packages/a1/39/2bef246368bd42f9bd7cba99844542b74b84dacbdbea0833e610f384fee8/debugpy-1.8.20-cp312-cp312-win_amd64.whl", hash = "sha256:a1a8f851e7cf171330679ef6997e9c579ef6dd33c9098458bd9986a0f4ca52e3", size = 5372835, upload-time = "2026-01-29T23:03:47.245Z" },
{ url = "https://files.pythonhosted.org/packages/15/e2/fc500524cc6f104a9d049abc85a0a8b3f0d14c0a39b9c140511c61e5b40b/debugpy-1.8.20-cp313-cp313-macosx_15_0_universal2.whl", hash = "sha256:5dff4bb27027821fdfcc9e8f87309a28988231165147c31730128b1c983e282a", size = 2539560, upload-time = "2026-01-29T23:03:48.738Z" },
{ url = "https://files.pythonhosted.org/packages/90/83/fb33dcea789ed6018f8da20c5a9bc9d82adc65c0c990faed43f7c955da46/debugpy-1.8.20-cp313-cp313-manylinux_2_34_x86_64.whl", hash = "sha256:84562982dd7cf5ebebfdea667ca20a064e096099997b175fe204e86817f64eaf", size = 4293272, upload-time = "2026-01-29T23:03:50.169Z" },
{ url = "https://files.pythonhosted.org/packages/a6/25/b1e4a01bfb824d79a6af24b99ef291e24189080c93576dfd9b1a2815cd0f/debugpy-1.8.20-cp313-cp313-win32.whl", hash = "sha256:da11dea6447b2cadbf8ce2bec59ecea87cc18d2c574980f643f2d2dfe4862393", size = 5331208, upload-time = "2026-01-29T23:03:51.547Z" },
{ url = "https://files.pythonhosted.org/packages/13/f7/a0b368ce54ffff9e9028c098bd2d28cfc5b54f9f6c186929083d4c60ba58/debugpy-1.8.20-cp313-cp313-win_amd64.whl", hash = "sha256:eb506e45943cab2efb7c6eafdd65b842f3ae779f020c82221f55aca9de135ed7", size = 5372930, upload-time = "2026-01-29T23:03:53.585Z" },
{ url = "https://files.pythonhosted.org/packages/33/2e/f6cb9a8a13f5058f0a20fe09711a7b726232cd5a78c6a7c05b2ec726cff9/debugpy-1.8.20-cp314-cp314-macosx_15_0_universal2.whl", hash = "sha256:9c74df62fc064cd5e5eaca1353a3ef5a5d50da5eb8058fcef63106f7bebe6173", size = 2538066, upload-time = "2026-01-29T23:03:54.999Z" },
{ url = "https://files.pythonhosted.org/packages/c5/56/6ddca50b53624e1ca3ce1d1e49ff22db46c47ea5fb4c0cc5c9b90a616364/debugpy-1.8.20-cp314-cp314-manylinux_2_34_x86_64.whl", hash = "sha256:077a7447589ee9bc1ff0cdf443566d0ecf540ac8aa7333b775ebcb8ce9f4ecad", size = 4269425, upload-time = "2026-01-29T23:03:56.518Z" },
{ url = "https://files.pythonhosted.org/packages/c5/d9/d64199c14a0d4c476df46c82470a3ce45c8d183a6796cfb5e66533b3663c/debugpy-1.8.20-cp314-cp314-win32.whl", hash = "sha256:352036a99dd35053b37b7803f748efc456076f929c6a895556932eaf2d23b07f", size = 5331407, upload-time = "2026-01-29T23:03:58.481Z" },
{ url = "https://files.pythonhosted.org/packages/e0/d9/1f07395b54413432624d61524dfd98c1a7c7827d2abfdb8829ac92638205/debugpy-1.8.20-cp314-cp314-win_amd64.whl", hash = "sha256:a98eec61135465b062846112e5ecf2eebb855305acc1dfbae43b72903b8ab5be", size = 5372521, upload-time = "2026-01-29T23:03:59.864Z" },
{ url = "https://files.pythonhosted.org/packages/e0/c3/7f67dea8ccf8fdcb9c99033bbe3e90b9e7395415843accb81428c441be2d/debugpy-1.8.20-py2.py3-none-any.whl", hash = "sha256:5be9bed9ae3be00665a06acaa48f8329d2b9632f15fd09f6a9a8c8d9907e54d7", size = 5337658, upload-time = "2026-01-29T23:04:17.404Z" },
]

[[package]]
name = "deprecated"
version = "1.3.1"
@@ -1133,6 +1158,24 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/97/a8/c070e1340636acb38d4e6a7e45c46d168a462b48b9b3257e14ca0e5af79b/environs-14.6.0-py3-none-any.whl", hash = "sha256:f8fb3d6c6a55872b0c6db077a28f5a8c7b8984b7c32029613d44cef95cfc0812", size = 17205, upload-time = "2026-02-20T04:02:07.299Z" },
]

[[package]]
name = "exa-py"
version = "2.10.2"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpcore" },
{ name = "httpx" },
{ name = "openai" },
{ name = "pydantic" },
{ name = "python-dotenv" },
{ name = "requests" },
{ name = "typing-extensions" },
]
sdist = { url = "https://files.pythonhosted.org/packages/fe/4f/f06a6f277d668f143e330fe503b0027cc5fed753b22c3e161f8cbbccdf65/exa_py-2.10.2.tar.gz", hash = "sha256:f781f30b199f1102333384728adae64bb15a6bbcabfa97e91fd705f90acffc45", size = 53792, upload-time = "2026-03-26T20:29:35.764Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/e2/bc/7a34e904a415040ba626948d0b0a36a08cd073f12b13342578a68331be3c/exa_py-2.10.2-py3-none-any.whl", hash = "sha256:ecb2a7581f4b7a8aeb6b434acce1bbc40f92ed1d4126b2aa6029913acd904a47", size = 72248, upload-time = "2026-03-26T20:29:37.306Z" },
]

[[package]]
name = "execnet"
version = "2.1.2"
@@ -1600,13 +1643,13 @@ wheels = [

[[package]]
name = "hermes-agent"
version = "0.5.0"
version = "0.7.0"
source = { editable = "." }
dependencies = [
{ name = "anthropic" },
{ name = "edge-tts" },
{ name = "exa-py" },
{ name = "fal-client" },
{ name = "faster-whisper" },
{ name = "fire" },
{ name = "firecrawl-py" },
{ name = "httpx" },
@@ -1632,10 +1675,13 @@ all = [
{ name = "aiohttp" },
{ name = "croniter" },
{ name = "daytona" },
{ name = "debugpy" },
{ name = "dingtalk-stream" },
{ name = "discord-py", extra = ["voice"] },
{ name = "elevenlabs" },
{ name = "faster-whisper" },
{ name = "honcho-ai" },
{ name = "lark-oapi" },
{ name = "mcp" },
{ name = "modal" },
{ name = "numpy" },
@@ -1660,6 +1706,7 @@ daytona = [
{ name = "daytona" },
]
dev = [
{ name = "debugpy" },
{ name = "mcp" },
{ name = "pytest" },
{ name = "pytest-asyncio" },
@@ -1668,6 +1715,9 @@ dev = [
dingtalk = [
{ name = "dingtalk-stream" },
]
feishu = [
{ name = "lark-oapi" },
]
homeassistant = [
{ name = "aiohttp" },
]
@@ -1712,6 +1762,7 @@ tts-premium = [
{ name = "elevenlabs" },
]
voice = [
{ name = "faster-whisper" },
{ name = "numpy" },
{ name = "sounddevice" },
]
@@ -1729,13 +1780,15 @@ requires-dist = [
{ name = "atroposlib", marker = "extra == 'rl'", git = "https://github.com/NousResearch/atropos.git" },
{ name = "croniter", marker = "extra == 'cron'", specifier = ">=6.0.0,<7" },
{ name = "daytona", marker = "extra == 'daytona'", specifier = ">=0.148.0,<1" },
{ name = "debugpy", marker = "extra == 'dev'", specifier = ">=1.8.0,<2" },
{ name = "dingtalk-stream", marker = "extra == 'dingtalk'", specifier = ">=0.1.0,<1" },
{ name = "discord-py", extras = ["voice"], marker = "extra == 'messaging'", specifier = ">=2.7.1,<3" },
{ name = "edge-tts", specifier = ">=7.2.7,<8" },
{ name = "elevenlabs", marker = "extra == 'tts-premium'", specifier = ">=1.0,<2" },
{ name = "exa-py", specifier = ">=2.9.0,<3" },
{ name = "fal-client", specifier = ">=0.13.1,<1" },
{ name = "fastapi", marker = "extra == 'rl'", specifier = ">=0.104.0,<1" },
{ name = "faster-whisper", specifier = ">=1.0.0,<2" },
{ name = "faster-whisper", marker = "extra == 'voice'", specifier = ">=1.0.0,<2" },
{ name = "fire", specifier = ">=0.7.1,<1" },
{ name = "firecrawl-py", specifier = ">=4.16.0,<5" },
{ name = "hermes-agent", extras = ["acp"], marker = "extra == 'all'" },
@@ -1744,6 +1797,7 @@ requires-dist = [
{ name = "hermes-agent", extras = ["daytona"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["dev"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["dingtalk"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["feishu"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["homeassistant"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["honcho"], marker = "extra == 'all'" },
{ name = "hermes-agent", extras = ["mcp"], marker = "extra == 'all'" },
@@ -1757,6 +1811,7 @@ requires-dist = [
{ name = "honcho-ai", marker = "extra == 'honcho'", specifier = ">=2.0.1,<3" },
{ name = "httpx", specifier = ">=0.28.1,<1" },
{ name = "jinja2", specifier = ">=3.1.5,<4" },
{ name = "lark-oapi", marker = "extra == 'feishu'", specifier = ">=1.5.3,<2" },
{ name = "matrix-nio", extras = ["e2e"], marker = "extra == 'matrix'", specifier = ">=0.24.0,<1" },
{ name = "mcp", marker = "extra == 'dev'", specifier = ">=1.2.0,<2" },
{ name = "mcp", marker = "extra == 'mcp'", specifier = ">=1.2.0,<2" },
@@ -1789,7 +1844,7 @@ requires-dist = [
{ name = "wandb", marker = "extra == 'rl'", specifier = ">=0.15.0,<1" },
{ name = "yc-bench", marker = "python_full_version >= '3.12' and extra == 'yc-bench'", git = "https://github.com/collinear-ai/yc-bench.git" },
]
provides-extras = ["modal", "daytona", "dev", "messaging", "cron", "slack", "matrix", "cli", "tts-premium", "voice", "pty", "honcho", "mcp", "homeassistant", "sms", "acp", "dingtalk", "rl", "yc-bench", "all"]
provides-extras = ["modal", "daytona", "dev", "messaging", "cron", "slack", "matrix", "cli", "tts-premium", "voice", "pty", "honcho", "mcp", "homeassistant", "sms", "acp", "dingtalk", "feishu", "rl", "yc-bench", "all"]
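With feishu added to provides-extras, the new optional dependency group is installable in the standard way, e.g. pip install "hermes-agent[feishu]" (or uv sync --extra feishu), which pulls in lark-oapi per the requires-dist entry above.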
[[package]]
name = "hf-transfer"
@@ -2267,6 +2322,21 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/0a/dd/8050c947d435c8d4bc94e3252f4d8bb8a76cfb424f043a8680be637a57f1/kiwisolver-1.5.0-pp311-pypy311_pp73-win_amd64.whl", hash = "sha256:59cd8683f575d96df5bb48f6add94afc055012c29e28124fcae2b63661b9efb1", size = 73558, upload-time = "2026-03-09T13:15:52.112Z" },
]

[[package]]
name = "lark-oapi"
version = "1.5.3"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "httpx" },
{ name = "pycryptodome" },
{ name = "requests" },
{ name = "requests-toolbelt" },
{ name = "websockets" },
]
wheels = [
{ url = "https://files.pythonhosted.org/packages/bf/ff/2ece5d735ebfa2af600a53176f2636ae47af2bf934e08effab64f0d1e047/lark_oapi-1.5.3-py3-none-any.whl", hash = "sha256:fda6b32bb38d21b6bdaae94979c600b94c7c521e985adade63a54e4b3e20cc36", size = 6993016, upload-time = "2026-01-27T08:21:49.307Z" },
]

[[package]]
name = "latex2sympy2-extended"
version = "1.11.0"
@@ -4122,6 +4192,18 @@ wheels = [
{ url = "https://files.pythonhosted.org/packages/56/5d/c814546c2333ceea4ba42262d8c4d55763003e767fa169adc693bd524478/requests-2.33.0-py3-none-any.whl", hash = "sha256:3324635456fa185245e24865e810cecec7b4caf933d7eb133dcde67d48cee69b", size = 65017, upload-time = "2026-03-25T15:10:40.382Z" },
]

[[package]]
name = "requests-toolbelt"
version = "1.0.0"
source = { registry = "https://pypi.org/simple" }
dependencies = [
{ name = "requests" },
]
sdist = { url = "https://files.pythonhosted.org/packages/f3/61/d7545dafb7ac2230c70d38d31cbfe4cc64f7144dc41f6e4e4b78ecd9f5bb/requests-toolbelt-1.0.0.tar.gz", hash = "sha256:7681a0a3d047012b5bdc0ee37d7f8f07ebe76ab08caeccfc3921ce23c88d5bc6", size = 206888, upload-time = "2023-05-01T04:11:33.229Z" }
wheels = [
{ url = "https://files.pythonhosted.org/packages/3f/51/d4db610ef29373b879047326cbf6fa98b6c1969d6f6dc423279de2b1be2c/requests_toolbelt-1.0.0-py2.py3-none-any.whl", hash = "sha256:cccfdd665f0a24fcf4726e690f65639d272bb0637b9b92dfd91a5568ccf6bd06", size = 54481, upload-time = "2023-05-01T04:11:28.427Z" },
]

[[package]]
name = "rich"
version = "14.3.3"