mirror of
https://github.com/NousResearch/hermes-agent.git
synced 2026-05-03 17:27:37 +08:00
The audit of v4.1 surfaced ~70 issues across the five scripts and three
reference docs — most user-visible (silent file overwrites, status-error
misclassified as success, X-API-Key leaked to S3 on /api/view redirect,
Cloud endpoints that 404 because they were renamed). v5.0.0 fixes those
and fills the gaps that previously forced users to write their own glue
(WebSocket monitoring, batch/sweep, img2img upload helper, dep auto-fix,
log fetch, health check, example workflows).
Critical fixes
- run_workflow.py: poll_status now checks status_str==error BEFORE
completed:true, so a failed run no longer reports success
- run_workflow.py: download_output streams to disk via safe_path_join,
preserves server subfolder structure (no silent overwrites), and
retries with exponential backoff
- run_workflow.py: refuses to overwrite a link with a literal in
inject_params (would silently break wiring)
- _common.py: _StripSensitiveOnRedirectSession (subclasses
requests.Session.rebuild_auth) drops X-API-Key/Cookie on cross-host
redirects — fixes a real key-leak path through Cloud's signed-URL
download flow. Tested
- Cloud routing (verified live): /history → /history_v2,
/models/<f> → /experiment/models/<f>, plus folder aliases for the
unet ↔ diffusion_models and clip ↔ text_encoders rename
- check_deps.py: distinguishes 200/empty vs 404 folder_not_found vs
403 free-tier; emits concrete fix_command per missing dep
- extract_schema.py: prompt vs negative_prompt determined by tracing
KSampler.{positive,negative} connections (incl. through Reroute /
Primitive nodes) instead of meta-title heuristic; symmetric
duplicate-name resolution; cycle-safe trace_to_node
- hardware_check.py: multi-GPU pick-best, Apple variant detection,
Rosetta detection, WSL2, ROCm --json, disk-space check, optional
PyTorch probe; powershell preferred over deprecated wmic
- comfyui_setup.sh: prefers pipx → uvx → pip --user (with PEP-668
fallback); idempotent — skips relaunch if server already up;
configurable port/workspace; persistent log; SIGINT trap
New scripts
- run_batch.py — count or sweep (cartesian product), parallel up to
cloud tier limit
- ws_monitor.py — real-time WebSocket viewer; saves preview frames
- auto_fix_deps.py — runs comfy node install / model download for
whatever check_deps reports missing (with --dry-run)
- health_check.py — single command that runs the verification checklist
(comfy-cli + server + checkpoints + optional smoke test that cancels
itself to avoid burning compute)
- fetch_logs.py — pull traceback / status messages for a prompt_id
Coverage expansion
- Param patterns now cover Flux (BasicScheduler, BasicGuider,
RandomNoise, ModelSamplingFlux), SD3, Wan/Hunyuan/LTX video,
IPAdapter, rgthree, easy-use, AnimateDiff
- Embedding refs in CLIPTextEncode strings extracted as model deps
- ckpt_name / vae_name / lora_name / unet_name now controllable so
workflows can be retargeted per run
Examples
- workflows/{sd15,sdxl,flux_dev}_txt2img.json
- workflows/sdxl_{img2img,inpaint}.json
- workflows/upscale_4x.json
- workflows/{animatediff_video,wan_video_t2v}.json + README
Tests
- 117 tests (105 unit + 8 cloud integration + 4 cross-host security)
- Cloud tests auto-skip without COMFY_CLOUD_API_KEY; verified end-to-end
against live cloud API
Backwards compatibility
- All existing CLI flags continue to work; new behavior is opt-in
(--ws, --input-image, --randomize-seed, --flat-output, etc.)
96 lines
4.2 KiB
Python
"""Integration tests against the live Comfy Cloud API.
|
|
|
|
These tests are auto-skipped when COMFY_CLOUD_API_KEY is not set.
|
|
They never SUBMIT workflows (would need a paid subscription) — they only
|
|
verify the read-only endpoints we rely on.
|
|
"""
|
|
|
|
from __future__ import annotations
|
|
|
|
import pytest
|
|
|
|
from _common import http_get, parse_model_list, resolve_url
|
|
|
|
|
|
pytestmark = pytest.mark.cloud
|
|
|
|
|
|
class TestCloudEndpointsLive:
|
|
def test_system_stats_reachable(self, cloud_key):
|
|
url = resolve_url("https://cloud.comfy.org", "/system_stats")
|
|
r = http_get(url, headers={"X-API-Key": cloud_key})
|
|
assert r.status == 200
|
|
data = r.json()
|
|
assert "system" in data
|
|
|
|
def test_models_endpoint_routed_to_experiment(self, cloud_key):
|
|
# We expect the skill to route /models/checkpoints → /api/experiment/models/checkpoints
|
|
url = resolve_url("https://cloud.comfy.org", "/models/checkpoints")
|
|
assert "/api/experiment/models/checkpoints" in url
|
|
r = http_get(url, headers={"X-API-Key": cloud_key})
|
|
assert r.status == 200
|
|
|
|
def test_models_endpoint_returns_dicts(self, cloud_key):
|
|
url = resolve_url("https://cloud.comfy.org", "/models/checkpoints")
|
|
r = http_get(url, headers={"X-API-Key": cloud_key})
|
|
data = r.json()
|
|
assert isinstance(data, list)
|
|
if data:
|
|
# Cloud format: list of dicts with `name`
|
|
assert isinstance(data[0], dict)
|
|
assert "name" in data[0]
|
|
# Our parser normalizes both
|
|
normalized = parse_model_list(data)
|
|
assert len(normalized) == len(data)
|
|
|
|
def test_history_renamed_to_v2(self, cloud_key):
|
|
# /history → /api/history_v2 on cloud
|
|
url = resolve_url("https://cloud.comfy.org", "/history/some-fake-id")
|
|
assert "/api/history_v2/some-fake-id" in url
|
|
|
|
def test_object_info_paid_tier(self, cloud_key):
|
|
# On free tier, /object_info returns 403 with a recognizable message
|
|
url = resolve_url("https://cloud.comfy.org", "/object_info")
|
|
r = http_get(url, headers={"X-API-Key": cloud_key})
|
|
# Should be either 200 (paid) or 403 (free) — not 404 / 500
|
|
assert r.status in (200, 403)
|
|
if r.status == 403:
|
|
# Body should mention the limitation
|
|
assert "free tier" in r.text().lower() or "subscription" in r.text().lower()
|
|
|
|
|
|
class TestCloudCheckDepsLive:
|
|
def test_check_deps_against_cloud(self, cloud_key, sd15_workflow):
|
|
from check_deps import check_deps
|
|
report = check_deps(sd15_workflow, host="https://cloud.comfy.org", api_key=cloud_key)
|
|
# Either node check passed OR was skipped (free tier)
|
|
assert "missing_models" in report
|
|
assert "is_cloud" in report and report["is_cloud"] is True
|
|
|
|
def test_flux_workflow_models_resolved_via_aliases(self, cloud_key, flux_workflow):
|
|
"""Flux uses unet/clip folders; cloud has them in diffusion_models/text_encoders.
|
|
With folder aliasing, the check should still find them."""
|
|
from check_deps import check_deps
|
|
report = check_deps(flux_workflow, host="https://cloud.comfy.org", api_key=cloud_key)
|
|
# The exact required Flux files (flux1-dev.safetensors, t5xxl_fp16, clip_l, ae)
|
|
# are present on cloud; with folder aliasing, none should be missing.
|
|
# If this fails, either the cloud removed the model or the aliasing logic broke.
|
|
missing_filenames = {m["value"] for m in report["missing_models"]}
|
|
assert "ae.safetensors" not in missing_filenames, \
|
|
"ae.safetensors should be on cloud's vae folder"
|
|
# t5xxl_fp16 / clip_l should be reachable via the clip → text_encoders alias
|
|
# flux1-dev.safetensors likewise via unet → diffusion_models
|
|
|
|
|
|
class TestHealthCheckLive:
|
|
def test_health_check_passes(self, cloud_key, capsys):
|
|
from health_check import main as health_main
|
|
rc = health_main(["--host", "https://cloud.comfy.org", "--api-key", cloud_key])
|
|
captured = capsys.readouterr()
|
|
# Should produce JSON
|
|
import json
|
|
report = json.loads(captured.out)
|
|
assert report["server"]["reachable"] is True
|
|
assert report["checkpoints"]["queryable"] is True
|
|
assert report["checkpoints"]["count"] > 0
|