/ hermes_cli / debug.py
debug.py
1 """``hermes debug`` debug tools for Hermes Agent. 2 3 Currently supports: 4 hermes debug share Upload debug report (system info + logs) to a 5 paste service and print a shareable URL. 6 By default, log content is run through 7 ``agent.redact.redact_sensitive_text`` with 8 ``force=True`` before upload so credentials in 9 ``~/.hermes/logs/*.log`` are not leaked into 10 the public paste service. Pass ``--no-redact`` 11 to disable. 12 """ 13 14 import io 15 import json 16 import logging 17 import sys 18 import time 19 import urllib.error 20 import urllib.parse 21 import urllib.request 22 from dataclasses import dataclass 23 from pathlib import Path 24 from typing import Optional 25 26 from hermes_constants import get_hermes_home 27 from utils import atomic_replace 28 29 logger = logging.getLogger(__name__) 30 31 # Banner prepended to upload-bound log content when redaction is enabled. 32 # Visible in the public paste so reviewers know the content was sanitized. 33 # Kept short; the trailing newline guarantees the banner sits on its own line. 34 _REDACTION_BANNER = ( 35 "[hermes debug share: log content redacted at upload time. " 36 "run with --no-redact to disable]\n" 37 ) 38 39 40 # --------------------------------------------------------------------------- 41 # Paste services — try paste.rs first, dpaste.com as fallback. 42 # --------------------------------------------------------------------------- 43 44 _PASTE_RS_URL = "https://paste.rs/" 45 _DPASTE_COM_URL = "https://dpaste.com/api/" 46 47 # Maximum bytes to read from a single log file for upload. 48 # paste.rs caps at ~1 MB; we stay under that with headroom. 49 _MAX_LOG_BYTES = 512_000 50 51 # Auto-delete pastes after this many seconds (6 hours). 52 _AUTO_DELETE_SECONDS = 21600 53 54 55 # --------------------------------------------------------------------------- 56 # Pending-deletion tracking (replaces the old fork-and-sleep subprocess). 57 # --------------------------------------------------------------------------- 58 59 def _pending_file() -> Path: 60 """Path to ``~/.hermes/pastes/pending.json``. 61 62 Each entry: ``{"url": "...", "expire_at": <unix_ts>}``. Scheduled 63 DELETEs used to be handled by spawning a detached Python process per 64 paste that slept for 6 hours; those accumulated forever if the user 65 ran ``hermes debug share`` repeatedly. 66 67 Deletion is now driven by the gateway's cron ticker 68 (``gateway/run.py::_start_cron_ticker``) which calls 69 ``_sweep_expired_pastes`` once per hour. ``hermes debug share`` also 70 runs an opportunistic sweep on entry as a fallback for CLI-only users 71 who never start the gateway. 72 """ 73 return get_hermes_home() / "pastes" / "pending.json" 74 75 76 def _load_pending() -> list[dict]: 77 path = _pending_file() 78 if not path.exists(): 79 return [] 80 try: 81 data = json.loads(path.read_text(encoding="utf-8")) 82 if isinstance(data, list): 83 # Filter to well-formed entries only 84 return [ 85 e for e in data 86 if isinstance(e, dict) and "url" in e and "expire_at" in e 87 ] 88 except (OSError, ValueError, json.JSONDecodeError): 89 pass 90 return [] 91 92 93 def _save_pending(entries: list[dict]) -> None: 94 path = _pending_file() 95 try: 96 path.parent.mkdir(parents=True, exist_ok=True) 97 tmp = path.with_suffix(".json.tmp") 98 tmp.write_text(json.dumps(entries, indent=2), encoding="utf-8") 99 atomic_replace(tmp, path) 100 except OSError: 101 # Non-fatal — worst case the user has to run ``hermes debug delete`` 102 # manually. 103 pass 104 105 106 def _record_pending(urls: list[str], delay_seconds: int = _AUTO_DELETE_SECONDS) -> None: 107 """Record *urls* for deletion at ``now + delay_seconds``. 108 109 Only paste.rs URLs are recorded (dpaste.com auto-expires). Entries 110 are merged into any existing pending.json. 111 """ 112 paste_rs_urls = [u for u in urls if _extract_paste_id(u)] 113 if not paste_rs_urls: 114 return 115 116 entries = _load_pending() 117 # Dedupe by URL: keep the later expire_at if same URL appears twice 118 by_url: dict[str, float] = {e["url"]: float(e["expire_at"]) for e in entries} 119 expire_at = time.time() + delay_seconds 120 for u in paste_rs_urls: 121 by_url[u] = max(expire_at, by_url.get(u, 0.0)) 122 merged = [{"url": u, "expire_at": ts} for u, ts in by_url.items()] 123 _save_pending(merged) 124 125 126 def _sweep_expired_pastes(now: Optional[float] = None) -> tuple[int, int]: 127 """Synchronously DELETE any pending pastes whose ``expire_at`` has passed. 128 129 Returns ``(deleted, remaining)``. Best-effort: failed deletes stay in 130 the pending file and will be retried on the next sweep. Silent — 131 intended to be called from every ``hermes debug`` invocation with 132 minimal noise. 133 """ 134 entries = _load_pending() 135 if not entries: 136 return (0, 0) 137 138 current = time.time() if now is None else now 139 deleted = 0 140 remaining: list[dict] = [] 141 142 for entry in entries: 143 try: 144 expire_at = float(entry.get("expire_at", 0)) 145 except (TypeError, ValueError): 146 continue # drop malformed entries 147 if expire_at > current: 148 remaining.append(entry) 149 continue 150 151 url = entry.get("url", "") 152 try: 153 if delete_paste(url): 154 deleted += 1 155 continue 156 except Exception: 157 # Network hiccup, 404 (already gone), etc. — drop the entry 158 # after a grace period; don't retry forever. 159 pass 160 161 # Retain failed deletes for up to 24h past expiration, then give up. 162 if expire_at + 86400 > current: 163 remaining.append(entry) 164 else: 165 deleted += 1 # count as reaped (paste.rs will GC eventually) 166 167 if deleted: 168 _save_pending(remaining) 169 170 return (deleted, len(remaining)) 171 172 173 def _best_effort_sweep_expired_pastes() -> None: 174 """Attempt pending-paste cleanup without letting /debug fail offline.""" 175 try: 176 _sweep_expired_pastes() 177 except Exception: 178 pass 179 180 181 # --------------------------------------------------------------------------- 182 # Privacy / delete helpers 183 # --------------------------------------------------------------------------- 184 185 _PRIVACY_NOTICE = """\ 186 ⚠️ This will upload the following to a public paste service: 187 • System info (OS, Python version, Hermes version, provider, which API keys 188 are configured — NOT the actual keys) 189 • Recent log lines (agent.log, errors.log, gateway.log — may contain 190 conversation fragments and file paths) 191 • Full agent.log and gateway.log (up to 512 KB each — likely contains 192 conversation content, tool outputs, and file paths) 193 194 Pastes auto-delete after 6 hours. 195 """ 196 197 _GATEWAY_PRIVACY_NOTICE = ( 198 "⚠️ **Privacy notice:** This uploads system info + recent log tails " 199 "(may contain conversation fragments) to a public paste service. " 200 "Full logs are NOT included from the gateway — use `hermes debug share` " 201 "from the CLI for full log uploads.\n" 202 "Pastes auto-delete after 6 hours." 203 ) 204 205 206 def _extract_paste_id(url: str) -> Optional[str]: 207 """Extract the paste ID from a paste.rs or dpaste.com URL. 208 209 Returns the ID string, or None if the URL doesn't match a known service. 210 """ 211 url = url.strip().rstrip("/") 212 for prefix in ("https://paste.rs/", "http://paste.rs/"): 213 if url.startswith(prefix): 214 return url[len(prefix):] 215 return None 216 217 218 def delete_paste(url: str) -> bool: 219 """Delete a paste from paste.rs. Returns True on success. 220 221 Only paste.rs supports unauthenticated DELETE. dpaste.com pastes 222 expire automatically but cannot be deleted via API. 223 """ 224 paste_id = _extract_paste_id(url) 225 if not paste_id: 226 raise ValueError( 227 f"Cannot delete: only paste.rs URLs are supported. Got: {url}" 228 ) 229 230 target = f"{_PASTE_RS_URL}{paste_id}" 231 req = urllib.request.Request( 232 target, method="DELETE", 233 headers={"User-Agent": "hermes-agent/debug-share"}, 234 ) 235 with urllib.request.urlopen(req, timeout=30) as resp: 236 return 200 <= resp.status < 300 237 238 239 def _schedule_auto_delete(urls: list[str], delay_seconds: int = _AUTO_DELETE_SECONDS): 240 """Record *urls* for deletion ``delay_seconds`` from now. 241 242 Previously this spawned a detached Python subprocess per call that slept 243 for 6 hours and then issued DELETE requests. Those subprocesses leaked — 244 every ``hermes debug share`` invocation added ~20 MB of resident Python 245 interpreters that never exited until the sleep completed. 246 247 The replacement is stateless: we append to ``~/.hermes/pastes/pending.json`` 248 and the gateway's cron ticker sweeps expired entries once per hour. 249 ``hermes debug share`` also runs an opportunistic sweep as a fallback 250 for CLI-only users. If neither runs again, paste.rs's own retention 251 policy handles cleanup. 252 """ 253 _record_pending(urls, delay_seconds=delay_seconds) 254 255 256 def _delete_hint(url: str) -> str: 257 """Return a one-liner delete command for the given paste URL.""" 258 paste_id = _extract_paste_id(url) 259 if paste_id: 260 return f"hermes debug delete {url}" 261 # dpaste.com — no API delete, expires on its own. 262 return "(auto-expires per dpaste.com policy)" 263 264 265 def _upload_paste_rs(content: str) -> str: 266 """Upload to paste.rs. Returns the paste URL. 267 268 paste.rs accepts a plain POST body and returns the URL directly. 269 """ 270 data = content.encode("utf-8") 271 req = urllib.request.Request( 272 _PASTE_RS_URL, data=data, method="POST", 273 headers={ 274 "Content-Type": "text/plain; charset=utf-8", 275 "User-Agent": "hermes-agent/debug-share", 276 }, 277 ) 278 with urllib.request.urlopen(req, timeout=30) as resp: 279 url = resp.read().decode("utf-8").strip() 280 if not url.startswith("http"): 281 raise ValueError(f"Unexpected response from paste.rs: {url[:200]}") 282 return url 283 284 285 def _upload_dpaste_com(content: str, expiry_days: int = 7) -> str: 286 """Upload to dpaste.com. Returns the paste URL. 287 288 dpaste.com uses multipart form data. 289 """ 290 boundary = "----HermesDebugBoundary9f3c" 291 292 def _field(name: str, value: str) -> str: 293 return ( 294 f"--{boundary}\r\n" 295 f'Content-Disposition: form-data; name="{name}"\r\n' 296 f"\r\n" 297 f"{value}\r\n" 298 ) 299 300 body = ( 301 _field("content", content) 302 + _field("syntax", "text") 303 + _field("expiry_days", str(expiry_days)) 304 + f"--{boundary}--\r\n" 305 ).encode("utf-8") 306 307 req = urllib.request.Request( 308 _DPASTE_COM_URL, data=body, method="POST", 309 headers={ 310 "Content-Type": f"multipart/form-data; boundary={boundary}", 311 "User-Agent": "hermes-agent/debug-share", 312 }, 313 ) 314 with urllib.request.urlopen(req, timeout=30) as resp: 315 url = resp.read().decode("utf-8").strip() 316 if not url.startswith("http"): 317 raise ValueError(f"Unexpected response from dpaste.com: {url[:200]}") 318 return url 319 320 321 def upload_to_pastebin(content: str, expiry_days: int = 7) -> str: 322 """Upload *content* to a paste service, trying paste.rs then dpaste.com. 323 324 Returns the paste URL on success, raises on total failure. 325 """ 326 errors: list[str] = [] 327 328 # Try paste.rs first (simple, fast) 329 try: 330 return _upload_paste_rs(content) 331 except Exception as exc: 332 errors.append(f"paste.rs: {exc}") 333 334 # Fallback: dpaste.com (supports expiry) 335 try: 336 return _upload_dpaste_com(content, expiry_days=expiry_days) 337 except Exception as exc: 338 errors.append(f"dpaste.com: {exc}") 339 340 raise RuntimeError( 341 "Failed to upload to any paste service:\n " + "\n ".join(errors) 342 ) 343 344 345 # --------------------------------------------------------------------------- 346 # Log file reading 347 # --------------------------------------------------------------------------- 348 349 350 @dataclass 351 class LogSnapshot: 352 """Single-read snapshot of a log file used by debug-share.""" 353 354 path: Optional[Path] 355 tail_text: str 356 full_text: Optional[str] 357 358 359 def _primary_log_path(log_name: str) -> Optional[Path]: 360 """Where *log_name* would live if present. Doesn't check existence.""" 361 from hermes_cli.logs import LOG_FILES 362 363 filename = LOG_FILES.get(log_name) 364 return (get_hermes_home() / "logs" / filename) if filename else None 365 366 367 def _resolve_log_path(log_name: str) -> Optional[Path]: 368 """Find the log file for *log_name*, falling back to the .1 rotation. 369 370 Returns the first non-empty candidate (primary, then .1), or None. 371 Callers distinguish 'empty primary' from 'truly missing' via 372 :func:`_primary_log_path`. 373 """ 374 primary = _primary_log_path(log_name) 375 if primary is None: 376 return None 377 378 if primary.exists() and primary.stat().st_size > 0: 379 return primary 380 381 rotated = primary.parent / f"{primary.name}.1" 382 if rotated.exists() and rotated.stat().st_size > 0: 383 return rotated 384 385 return None 386 387 388 def _redact_log_text(text: str) -> str: 389 """Run ``redact_sensitive_text`` with ``force=True`` over upload-bound text. 390 391 Uses ``force=True`` so redaction fires regardless of the operator's 392 ``security.redact_secrets`` setting. The local on-disk log file is 393 not modified; only the in-memory copy headed for the public paste 394 service is sanitized. Returns the redacted text (or the original 395 when empty / non-string). 396 """ 397 if not text: 398 return text 399 from agent.redact import redact_sensitive_text 400 401 return redact_sensitive_text(text, force=True) 402 403 404 def _capture_log_snapshot( 405 log_name: str, 406 *, 407 tail_lines: int, 408 max_bytes: int = _MAX_LOG_BYTES, 409 redact: bool = True, 410 ) -> LogSnapshot: 411 """Capture a log once and derive summary/full-log views from it. 412 413 The report tail and standalone log upload must come from the same file 414 snapshot. Otherwise a rotation/truncate between reads can make the report 415 look newer than the uploaded ``agent.log`` paste. 416 417 When ``redact`` is True (the default), both ``tail_text`` and 418 ``full_text`` are run through ``_redact_log_text`` so the snapshot 419 returned is upload-safe. The on-disk log file is never modified. 420 Pass ``redact=False`` to capture original log content (used by 421 ``hermes debug share --no-redact``). 422 """ 423 log_path = _resolve_log_path(log_name) 424 if log_path is None: 425 primary = _primary_log_path(log_name) 426 tail = "(file empty)" if primary and primary.exists() else "(file not found)" 427 return LogSnapshot(path=None, tail_text=tail, full_text=None) 428 429 try: 430 size = log_path.stat().st_size 431 if size == 0: 432 # race: file was truncated between _resolve_log_path and stat 433 return LogSnapshot(path=log_path, tail_text="(file empty)", full_text=None) 434 435 with open(log_path, "rb") as f: 436 if size <= max_bytes: 437 raw = f.read() 438 truncated = False 439 else: 440 # Read from the end until we have enough bytes for the 441 # standalone upload and enough newline context to render the 442 # summary tail from the same snapshot. 443 chunk_size = 8192 444 pos = size 445 chunks: list[bytes] = [] 446 total = 0 447 newline_count = 0 448 449 while pos > 0 and (total < max_bytes or newline_count <= tail_lines + 1) and total < max_bytes * 2: 450 read_size = min(chunk_size, pos) 451 pos -= read_size 452 f.seek(pos) 453 chunk = f.read(read_size) 454 chunks.insert(0, chunk) 455 total += len(chunk) 456 newline_count += chunk.count(b"\n") 457 chunk_size = min(chunk_size * 2, 65536) 458 459 raw = b"".join(chunks) 460 truncated = pos > 0 461 462 full_raw = raw 463 if truncated and len(full_raw) > max_bytes: 464 cut = len(full_raw) - max_bytes 465 # Check whether the cut lands exactly on a line boundary. If the 466 # byte just before the cut position is a newline the first retained 467 # byte starts a complete line and we should keep it. Only drop a 468 # partial first line when we're genuinely mid-line. 469 on_boundary = cut > 0 and full_raw[cut - 1 : cut] == b"\n" 470 full_raw = full_raw[cut:] 471 if not on_boundary and b"\n" in full_raw: 472 full_raw = full_raw.split(b"\n", 1)[1] 473 474 all_text = raw.decode("utf-8", errors="replace") 475 tail_text = "".join(all_text.splitlines(keepends=True)[-tail_lines:]).rstrip("\n") 476 477 full_text = full_raw.decode("utf-8", errors="replace") 478 if truncated: 479 full_text = f"[... truncated — showing last ~{max_bytes // 1024}KB ...]\n{full_text}" 480 481 if redact: 482 tail_text = _redact_log_text(tail_text) 483 full_text = _redact_log_text(full_text) 484 485 return LogSnapshot(path=log_path, tail_text=tail_text, full_text=full_text) 486 except Exception as exc: 487 return LogSnapshot(path=log_path, tail_text=f"(error reading: {exc})", full_text=None) 488 489 490 def _capture_default_log_snapshots( 491 log_lines: int, *, redact: bool = True 492 ) -> dict[str, LogSnapshot]: 493 """Capture all logs used by debug-share exactly once. 494 495 ``redact`` is forwarded to each ``_capture_log_snapshot`` call so all 496 captured logs share the same redaction policy for a given run. 497 """ 498 errors_lines = min(log_lines, 100) 499 return { 500 "agent": _capture_log_snapshot( 501 "agent", tail_lines=log_lines, redact=redact 502 ), 503 "errors": _capture_log_snapshot( 504 "errors", tail_lines=errors_lines, redact=redact 505 ), 506 "gateway": _capture_log_snapshot( 507 "gateway", tail_lines=errors_lines, redact=redact 508 ), 509 } 510 511 512 # --------------------------------------------------------------------------- 513 # Debug report collection 514 # --------------------------------------------------------------------------- 515 516 def _capture_dump() -> str: 517 """Run ``hermes dump`` and return its stdout as a string.""" 518 from hermes_cli.dump import run_dump 519 520 class _FakeArgs: 521 show_keys = False 522 523 old_stdout = sys.stdout 524 sys.stdout = capture = io.StringIO() 525 try: 526 run_dump(_FakeArgs()) 527 except SystemExit: 528 pass 529 finally: 530 sys.stdout = old_stdout 531 532 return capture.getvalue() 533 534 535 def collect_debug_report( 536 *, 537 log_lines: int = 200, 538 dump_text: str = "", 539 log_snapshots: Optional[dict[str, LogSnapshot]] = None, 540 ) -> str: 541 """Build the summary debug report: system dump + log tails. 542 543 Parameters 544 ---------- 545 log_lines 546 Number of recent lines to include per log file. 547 dump_text 548 Pre-captured dump output. If empty, ``hermes dump`` is run 549 internally. 550 551 Returns the report as a plain-text string ready for upload. 552 """ 553 buf = io.StringIO() 554 555 if not dump_text: 556 dump_text = _capture_dump() 557 buf.write(dump_text) 558 559 if log_snapshots is None: 560 log_snapshots = _capture_default_log_snapshots(log_lines) 561 562 # ── Recent log tails (summary only) ────────────────────────────────── 563 buf.write("\n\n") 564 buf.write(f"--- agent.log (last {log_lines} lines) ---\n") 565 buf.write(log_snapshots["agent"].tail_text) 566 buf.write("\n\n") 567 568 errors_lines = min(log_lines, 100) 569 buf.write(f"--- errors.log (last {errors_lines} lines) ---\n") 570 buf.write(log_snapshots["errors"].tail_text) 571 buf.write("\n\n") 572 573 buf.write(f"--- gateway.log (last {errors_lines} lines) ---\n") 574 buf.write(log_snapshots["gateway"].tail_text) 575 buf.write("\n") 576 577 return buf.getvalue() 578 579 580 # --------------------------------------------------------------------------- 581 # CLI entry points 582 # --------------------------------------------------------------------------- 583 584 def run_debug_share(args): 585 """Collect debug report + full logs, upload each, print URLs.""" 586 _best_effort_sweep_expired_pastes() 587 588 log_lines = getattr(args, "lines", 200) 589 expiry = getattr(args, "expire", 7) 590 local_only = getattr(args, "local", False) 591 redact = not getattr(args, "no_redact", False) 592 593 if not local_only: 594 print(_PRIVACY_NOTICE) 595 596 print("Collecting debug report...") 597 598 # Capture dump once — prepended to every paste for context. 599 # The dump is already redacted at extract time via dump.py:_redact; 600 # log_snapshots are redacted by _capture_default_log_snapshots when 601 # redact=True so credentials never reach the public paste service. 602 dump_text = _capture_dump() 603 log_snapshots = _capture_default_log_snapshots(log_lines, redact=redact) 604 605 if redact: 606 logger.info( 607 "hermes debug share: applied force-mode redaction to log snapshots before upload" 608 ) 609 610 report = collect_debug_report( 611 log_lines=log_lines, 612 dump_text=dump_text, 613 log_snapshots=log_snapshots, 614 ) 615 agent_log = log_snapshots["agent"].full_text 616 gateway_log = log_snapshots["gateway"].full_text 617 618 # Prepend dump header to each full log so every paste is self-contained. 619 if agent_log: 620 agent_log = dump_text + "\n\n--- full agent.log ---\n" + agent_log 621 if gateway_log: 622 gateway_log = dump_text + "\n\n--- full gateway.log ---\n" + gateway_log 623 624 # Visible banner so reviewers reading the public paste know redaction 625 # was applied at upload time. Banner is omitted under --no-redact. 626 if redact: 627 report = _REDACTION_BANNER + report 628 if agent_log: 629 agent_log = _REDACTION_BANNER + agent_log 630 if gateway_log: 631 gateway_log = _REDACTION_BANNER + gateway_log 632 633 if local_only: 634 print(report) 635 if agent_log: 636 print(f"\n\n{'=' * 60}") 637 print("FULL agent.log") 638 print(f"{'=' * 60}\n") 639 print(agent_log) 640 if gateway_log: 641 print(f"\n\n{'=' * 60}") 642 print("FULL gateway.log") 643 print(f"{'=' * 60}\n") 644 print(gateway_log) 645 return 646 647 print("Uploading...") 648 urls: dict[str, str] = {} 649 failures: list[str] = [] 650 651 # 1. Summary report (required) 652 try: 653 urls["Report"] = upload_to_pastebin(report, expiry_days=expiry) 654 except RuntimeError as exc: 655 print(f"\nUpload failed: {exc}", file=sys.stderr) 656 print("\nFull report printed below — copy-paste it manually:\n") 657 print(report) 658 sys.exit(1) 659 660 # 2. Full agent.log (optional) 661 if agent_log: 662 try: 663 urls["agent.log"] = upload_to_pastebin(agent_log, expiry_days=expiry) 664 except Exception as exc: 665 failures.append(f"agent.log: {exc}") 666 667 # 3. Full gateway.log (optional) 668 if gateway_log: 669 try: 670 urls["gateway.log"] = upload_to_pastebin(gateway_log, expiry_days=expiry) 671 except Exception as exc: 672 failures.append(f"gateway.log: {exc}") 673 674 # Print results 675 label_width = max(len(k) for k in urls) 676 print(f"\nDebug report uploaded:") 677 for label, url in urls.items(): 678 print(f" {label:<{label_width}} {url}") 679 680 if failures: 681 print(f"\n (failed to upload: {', '.join(failures)})") 682 683 # Schedule auto-deletion after 6 hours 684 _schedule_auto_delete(list(urls.values())) 685 print(f"\n⏱ Pastes will auto-delete in 6 hours.") 686 687 # Manual delete fallback 688 print(f"To delete now: hermes debug delete <url>") 689 690 print(f"\nShare these links with the Hermes team for support.") 691 692 693 def run_debug_delete(args): 694 """Delete one or more paste URLs uploaded by /debug.""" 695 urls = getattr(args, "urls", []) 696 if not urls: 697 print("Usage: hermes debug delete <url> [<url> ...]") 698 print(" Deletes paste.rs pastes uploaded by 'hermes debug share'.") 699 return 700 701 for url in urls: 702 try: 703 ok = delete_paste(url) 704 if ok: 705 print(f" ✓ Deleted: {url}") 706 else: 707 print(f" ✗ Failed to delete: {url} (unexpected response)") 708 except ValueError as exc: 709 print(f" ✗ {exc}") 710 except Exception as exc: 711 print(f" ✗ Could not delete {url}: {exc}") 712 713 714 def run_debug(args): 715 """Route debug subcommands.""" 716 # Opportunistic sweep of expired pastes on every ``hermes debug`` call. 717 # Replaces the old per-paste sleeping subprocess that used to leak as 718 # one orphaned Python interpreter per scheduled deletion. Silent and 719 # best-effort — any failure is swallowed so ``hermes debug`` stays 720 # reliable even when offline. 721 try: 722 _sweep_expired_pastes() 723 except Exception: 724 pass 725 726 subcmd = getattr(args, "debug_command", None) 727 if subcmd == "share": 728 run_debug_share(args) 729 elif subcmd == "delete": 730 run_debug_delete(args) 731 else: 732 # Default: show help 733 print("Usage: hermes debug <command>") 734 print() 735 print("Commands:") 736 print(" share Upload debug report to a paste service and print URL") 737 print(" delete Delete a previously uploaded paste") 738 print() 739 print("Options (share):") 740 print(" --lines N Number of log lines to include (default: 200)") 741 print(" --expire N Paste expiry in days (default: 7)") 742 print(" --local Print report locally instead of uploading") 743 print(" --no-redact Disable upload-time secret redaction (default: redact)") 744 print() 745 print("Options (delete):") 746 print(" <url> ... One or more paste URLs to delete")