Mirror of https://github.com/NousResearch/hermes-agent.git, synced 2026-05-03 17:27:37 +08:00.
The audit of v4.1 surfaced ~70 issues across the five scripts and three
reference docs — most user-visible (silent file overwrites, status-error
misclassified as success, X-API-Key leaked to S3 on /api/view redirect,
Cloud endpoints that 404 because they were renamed). v5.0.0 fixes those
and fills the gaps that previously forced users to write their own glue
(WebSocket monitoring, batch/sweep, img2img upload helper, dep auto-fix,
log fetch, health check, example workflows).
Critical fixes
- run_workflow.py: poll_status now checks status_str==error BEFORE
completed:true, so a failed run no longer reports success
- run_workflow.py: download_output streams to disk via safe_path_join,
preserves server subfolder structure (no silent overwrites), and
retries with exponential backoff
- run_workflow.py: refuses to overwrite a link with a literal in
inject_params (would silently break wiring)
- _common.py: _StripSensitiveOnRedirectSession (subclasses
requests.Session.rebuild_auth) drops X-API-Key/Cookie on cross-host
redirects — fixes a real key-leak path through Cloud's signed-URL
download flow. Tested
- Cloud routing (verified live): /history → /history_v2,
/models/<f> → /experiment/models/<f>, plus folder aliases for the
unet ↔ diffusion_models and clip ↔ text_encoders rename
- check_deps.py: distinguishes 200/empty vs 404 folder_not_found vs
403 free-tier; emits concrete fix_command per missing dep
- extract_schema.py: prompt vs negative_prompt determined by tracing
KSampler.{positive,negative} connections (incl. through Reroute /
Primitive nodes) instead of meta-title heuristic; symmetric
duplicate-name resolution; cycle-safe trace_to_node
- hardware_check.py: multi-GPU pick-best, Apple variant detection,
Rosetta detection, WSL2, ROCm --json, disk-space check, optional
PyTorch probe; powershell preferred over deprecated wmic
- comfyui_setup.sh: prefers pipx → uvx → pip --user (with PEP-668
fallback); idempotent — skips relaunch if server already up;
configurable port/workspace; persistent log; SIGINT trap
New scripts
- run_batch.py — count or sweep (cartesian product), parallel up to
cloud tier limit
- ws_monitor.py — real-time WebSocket viewer; saves preview frames
- auto_fix_deps.py — runs comfy node install / model download for
whatever check_deps reports missing (with --dry-run)
- health_check.py — single command that runs the verification checklist
(comfy-cli + server + checkpoints + optional smoke test that cancels
itself to avoid burning compute)
- fetch_logs.py — pull traceback / status messages for a prompt_id
Coverage expansion
- Param patterns now cover Flux (BasicScheduler, BasicGuider,
RandomNoise, ModelSamplingFlux), SD3, Wan/Hunyuan/LTX video,
IPAdapter, rgthree, easy-use, AnimateDiff
- Embedding refs in CLIPTextEncode strings extracted as model deps
- ckpt_name / vae_name / lora_name / unet_name now controllable so
workflows can be retargeted per run
Examples
- workflows/{sd15,sdxl,flux_dev}_txt2img.json
- workflows/sdxl_{img2img,inpaint}.json
- workflows/upscale_4x.json
- workflows/{animatediff_video,wan_video_t2v}.json + README
Tests
- 117 tests (105 unit + 8 cloud integration + 4 cross-host security)
- Cloud tests auto-skip without COMFY_CLOUD_API_KEY; verified end-to-end
against live cloud API
Backwards compatibility
- All existing CLI flags continue to work; new behavior is opt-in
(--ws, --input-image, --randomize-seed, --flat-output, etc.)
226 lines
8.1 KiB
Python
Executable File
226 lines
8.1 KiB
Python
Executable File
#!/usr/bin/env python3
|
|
"""
|
|
auto_fix_deps.py — Run check_deps.py, then attempt to install whatever is missing.
|
|
|
|
For local servers:
|
|
- Missing custom nodes → `comfy node install <package>`
|
|
- Missing models → `comfy model download` (only if a URL is supplied via
|
|
--model-source-file or detected via well-known names)
|
|
|
|
For cloud: prints what would be needed but cannot install (cloud preinstalls
|
|
custom nodes and most models server-side; if something genuinely isn't there,
|
|
ask Comfy support).
|
|
|
|
This is conservative: it never installs without an explicit URL for models
|
|
(downloading the wrong model is hard to undo). Custom nodes from the registry
|
|
are auto-installed by name.
|
|
|
|
Usage:
|
|
python3 auto_fix_deps.py workflow_api.json
|
|
python3 auto_fix_deps.py workflow_api.json --models-from-file urls.json
|
|
python3 auto_fix_deps.py workflow_api.json --dry-run
|
|
"""
|
|
|
|
from __future__ import annotations
|
|
|
|
import argparse
|
|
import json
|
|
import shutil
|
|
import subprocess
|
|
import sys
|
|
from pathlib import Path
|
|
|
|
sys.path.insert(0, str(Path(__file__).resolve().parent))
|
|
from _common import ( # noqa: E402
|
|
DEFAULT_LOCAL_HOST, ENV_API_KEY, emit_json, log, resolve_api_key,
|
|
)
|
|
from check_deps import check_deps # noqa: E402
|
|
from _common import unwrap_workflow # noqa: E402
|
|
|
|
|
|
def comfy_cli_available() -> str | None:
|
|
"""Return command prefix for comfy-cli, or None."""
|
|
if shutil.which("comfy"):
|
|
return "comfy"
|
|
if shutil.which("uvx"):
|
|
return "uvx --from comfy-cli comfy"
|
|
return None
|
|
|
|
|
|
def run_cmd(cmd: list[str], *, dry_run: bool = False) -> tuple[int, str]:
    """Execute *cmd* and return (exit_code, combined stdout+stderr).

    Under dry_run nothing is executed; (0, "[dry-run]") is returned so
    callers can treat the step as a no-op success.
    """
    if dry_run:
        return 0, "[dry-run]"
    log(f"$ {' '.join(cmd)}")
    completed = subprocess.run(cmd, capture_output=True, text=True, check=False)
    # Merge the two streams, skipping whichever is empty/None.
    combined = "".join(s for s in (completed.stdout, completed.stderr) if s)
    return completed.returncode, combined
def install_node(package: str, *, dry_run: bool = False, comfy_cmd: str = "comfy") -> bool:
    """Install a custom-node *package* from the registry via comfy-cli.

    Returns True when the install command exits 0 (always True under dry_run).
    """
    argv = [*comfy_cmd.split(), "--skip-prompt", "node", "install", package]
    exit_code, _ = run_cmd(argv, dry_run=dry_run)
    return exit_code == 0
def install_model(url: str, folder: str, filename: str | None = None,
                  *, dry_run: bool = False, comfy_cmd: str = "comfy",
                  hf_token: str | None = None, civitai_token: str | None = None) -> bool:
    """Download a model from *url* into models/<folder> via `comfy model download`.

    Optional filename and HuggingFace / CivitAI tokens are forwarded to
    comfy-cli when provided. Returns True when the command exits 0
    (always True under dry_run).
    """
    argv = comfy_cmd.split()
    argv += [
        "--skip-prompt", "model", "download",
        "--url", url,
        "--relative-path", f"models/{folder}",
    ]
    # Append each optional flag only when its value was supplied.
    for flag, value in (
        ("--filename", filename),
        ("--set-hf-api-token", hf_token),
        ("--set-civitai-api-token", civitai_token),
    ):
        if value:
            argv += [flag, value]
    exit_code, _ = run_cmd(argv, dry_run=dry_run)
    return exit_code == 0
def main(argv: list[str] | None = None) -> int:
    """Run check_deps against a workflow, then try to install what's missing.

    Emits a JSON report describing every action taken and every dependency
    that could not be fixed. Returns 0 when the workflow is (or becomes)
    ready, 1 on load errors, cloud targets, missing comfy-cli, or any
    remaining failure.
    """
    parser = argparse.ArgumentParser(description="Run check_deps and install whatever is missing")
    parser.add_argument("workflow")
    parser.add_argument("--host", default=DEFAULT_LOCAL_HOST)
    parser.add_argument("--api-key", help=f"or set ${ENV_API_KEY}")
    parser.add_argument("--models-from-file",
                        help="JSON file mapping {model_filename: download_url} for models that need install")
    parser.add_argument("--hf-token", help="HuggingFace token for downloads")
    parser.add_argument("--civitai-token", help="CivitAI token for downloads")
    parser.add_argument("--dry-run", action="store_true",
                        help="Show what would be installed without doing it")
    parser.add_argument("--no-restart", action="store_true",
                        help="Don't suggest restarting the server after node install")
    args = parser.parse_args(argv)

    api_key = resolve_api_key(args.api_key)

    workflow_path = Path(args.workflow).expanduser()
    if not workflow_path.exists():
        emit_json({"error": f"Workflow not found: {args.workflow}"})
        return 1
    try:
        with workflow_path.open() as fh:
            workflow = unwrap_workflow(json.load(fh))
    except (ValueError, json.JSONDecodeError) as exc:
        emit_json({"error": str(exc)})
        return 1

    report = check_deps(workflow, host=args.host, api_key=api_key)

    if report["is_ready"]:
        emit_json({"status": "ready", "report": report})
        return 0

    # Cloud servers are managed; there is nothing we can install from here.
    if report["is_cloud"]:
        emit_json({
            "status": "cannot_fix_cloud",
            "reason": "Comfy Cloud preinstalls nodes; if something is genuinely missing, contact support.",
            "report": report,
        })
        return 1

    cli = comfy_cli_available()
    if not cli:
        emit_json({
            "status": "cannot_fix",
            "reason": "comfy-cli not on PATH; install with `pip install comfy-cli` or `pipx install comfy-cli`",
            "report": report,
        })
        return 1

    actions: list[dict] = []
    failures: list[dict] = []

    # ---- Install missing custom nodes ----
    handled: set[str] = set()
    for missing in report["missing_nodes"]:
        fix = missing.get("fix_command", "")
        if not fix.startswith("comfy node install "):
            # No known registry package for this node class; report and move on.
            failures.append({
                "kind": "node", "node_class": missing["class_type"],
                "ok": False, "reason": "No registry mapping known. " + missing.get("fix_hint", ""),
            })
            continue
        package = fix.split(" ")[-1]
        if package in handled:
            continue  # several node classes can come from one package
        handled.add(package)
        ok = install_node(package, dry_run=args.dry_run, comfy_cmd=cli)
        record = {
            "kind": "node", "package": package, "node_class": missing["class_type"],
            "ok": ok,
        }
        (actions if ok else failures).append(record)

    # ---- Install missing models (only when URL provided) ----
    sources: dict[str, str] = {}
    if args.models_from_file:
        try:
            sources = json.loads(Path(args.models_from_file).read_text())
        except (OSError, json.JSONDecodeError) as exc:
            log(f"Could not read --models-from-file: {exc}")

    for missing in report["missing_models"]:
        model_name = missing["value"]
        url = sources.get(model_name)
        if not url:
            # Deliberately refuse to guess a download URL for a model.
            failures.append({
                "kind": "model", "filename": model_name, "folder": missing["folder"],
                "ok": False, "reason": "No URL provided in --models-from-file. "
                                       "Refusing to guess.",
            })
            continue
        ok = install_model(
            url, missing["folder"], model_name,
            dry_run=args.dry_run, comfy_cmd=cli,
            hf_token=args.hf_token, civitai_token=args.civitai_token,
        )
        record = {
            "kind": "model", "filename": model_name, "folder": missing["folder"],
            "url": url, "ok": ok,
        }
        (actions if ok else failures).append(record)

    # ---- Embeddings ----
    for missing in report["missing_embeddings"]:
        emb = missing["embedding_name"]
        # Try common extensions in user-supplied source map
        url = (sources.get(f"{emb}.pt")
               or sources.get(f"{emb}.safetensors")
               or sources.get(emb))
        if not url:
            failures.append({
                "kind": "embedding", "name": emb,
                "ok": False, "reason": "No URL provided in --models-from-file.",
            })
            continue
        suffix = ".safetensors" if url.endswith(".safetensors") else ".pt"
        ok = install_model(
            url, "embeddings", f"{emb}{suffix}",
            dry_run=args.dry_run, comfy_cmd=cli,
            hf_token=args.hf_token, civitai_token=args.civitai_token,
        )
        record = {
            "kind": "embedding", "name": emb, "url": url, "ok": ok,
        }
        (actions if ok else failures).append(record)

    # A node install only takes effect after the server reloads.
    needs_restart = any(a["kind"] == "node" and a.get("ok") for a in actions)

    emit_json({
        "status": "fixed" if not failures else "partial",
        "actions_taken": actions,
        "failures": failures,
        "needs_server_restart": needs_restart and not args.no_restart,
        "restart_hint": "comfy stop && comfy launch --background",
        "dry_run": args.dry_run,
    })
    return 0 if not failures else 1
if __name__ == "__main__":
    # Script entry point: propagate main()'s status code to the shell.
    raise SystemExit(main())