end_to_end.py
1 #!/usr/bin/env python3 2 """ 3 Radicle + Auths Full-Stack E2E Smoke Test 4 5 Orchestrates the entire local stack: 6 1. Builds auths CLI, radicle-httpd, and the radicle-explorer frontend 7 2. Creates two Radicle nodes with deterministic keys 8 3. Creates a KERI identity and links both nodes as devices 9 4. Creates a new Radicle project (git repo) 10 5. Starts a Radicle node to host the project 11 6. Pushes a signed patch from device 1 12 7. Pushes a signed patch from device 2 13 8. Starts radicle-httpd to serve the API 14 9. Starts the radicle-explorer frontend 15 10. Runs HTTP assertions against the API 16 11. Prints URLs for manual browser inspection 17 18 Usage: 19 python3 docs/smoketests/end_to_end.py 20 # skip cargo/npm builds 21 python3 docs/smoketests/end_to_end.py --skip-build 22 # keep services running for manual testing 23 python3 docs/smoketests/end_to_end.py --keep-alive 24 # skip frontend build/serve 25 python3 docs/smoketests/end_to_end.py --no-frontend 26 # open browser at the end 27 python3 docs/smoketests/end_to_end.py --open-browser 28 # ALL 29 python3 docs/smoketests/end_to_end.py --keep-alive --open-browser 30 31 Requirements: 32 - Python 3.10+ 33 - rad CLI installed (https://radicle.xyz) 34 - Rust toolchain (cargo) 35 - Node.js 20+ and npm 36 """ 37 38 from __future__ import annotations 39 40 import argparse 41 import atexit 42 import json 43 import os 44 import shutil 45 import signal 46 import subprocess 47 import sys 48 import tempfile 49 import textwrap 50 import time 51 import urllib.error 52 import urllib.request 53 from dataclasses import dataclass, field 54 from pathlib import Path 55 from typing import Any 56 57 # ── Paths ──────────────────────────────────────────────────────────────────── 58 59 SCRIPT_DIR = Path(__file__).resolve().parent 60 AUTHS_REPO = SCRIPT_DIR.parent.parent # auths-base/auths 61 EXPLORER_REPO = AUTHS_REPO.parent.parent / "radicle-base" / "radicle-explorer" 62 HTTPD_CRATE = EXPLORER_REPO / "radicle-httpd" 63 
VERIFIER_CRATE = AUTHS_REPO / "crates" / "auths-verifier" 64 VERIFIER_TS = AUTHS_REPO / "packages" / "auths-verifier-ts" 65 66 # ── Ports ──────────────────────────────────────────────────────────────────── 67 68 NODE1_P2P_PORT = 19876 69 NODE2_P2P_PORT = 19877 70 HTTPD_PORT = 8080 # must match defaultLocalHttpdPort in explorer config 71 FRONTEND_PORT = 3000 72 73 # ── Colors ─────────────────────────────────────────────────────────────────── 74 75 RED = "\033[0;31m" 76 GREEN = "\033[0;32m" 77 YELLOW = "\033[1;33m" 78 BLUE = "\033[0;34m" 79 CYAN = "\033[0;36m" 80 BOLD = "\033[1m" 81 DIM = "\033[2m" 82 NC = "\033[0m" 83 84 85 def _c(color: str, text: str) -> str: 86 return f"{color}{text}{NC}" 87 88 89 # ── Logging ────────────────────────────────────────────────────────────────── 90 91 92 def phase(title: str) -> None: 93 print() 94 print(_c(BLUE, "=" * 64)) 95 print(_c(BOLD, f" {title}")) 96 print(_c(BLUE, "=" * 64)) 97 print() 98 99 100 def info(msg: str) -> None: 101 print(f" {_c(CYAN, chr(0x2192))} {msg}") 102 103 104 def ok(msg: str) -> None: 105 print(f" {_c(GREEN, chr(0x2713))} {msg}") 106 107 108 def fail(msg: str) -> None: 109 print(f" {_c(RED, chr(0x2717))} {msg}") 110 111 112 def warn(msg: str) -> None: 113 print(f" {_c(YELLOW, chr(0x26A0))} {msg}") 114 115 116 # ── Subprocess helpers ─────────────────────────────────────────────────────── 117 118 119 def run( 120 cmd: list[str], 121 *, 122 env: dict[str, str] | None = None, 123 cwd: str | Path | None = None, 124 capture: bool = True, 125 check: bool = True, 126 timeout: int = 120, 127 ) -> subprocess.CompletedProcess[str]: 128 """Run a command, merging env with os.environ.""" 129 merged_env = {**os.environ, **(env or {})} 130 try: 131 result = subprocess.run( 132 cmd, 133 env=merged_env, 134 cwd=cwd, 135 capture_output=capture, 136 text=True, 137 check=check, 138 timeout=timeout, 139 ) 140 return result 141 except subprocess.CalledProcessError as e: 142 fail(f"Command failed: {' '.join(cmd)}") 143 if 
e.stdout: 144 for line in e.stdout.strip().splitlines(): 145 print(f" {line}") 146 if e.stderr: 147 for line in e.stderr.strip().splitlines(): 148 print(f" {_c(DIM, line)}") 149 raise 150 except subprocess.TimeoutExpired: 151 fail(f"Command timed out after {timeout}s: {' '.join(cmd)}") 152 raise 153 154 155 def spawn( 156 cmd: list[str], 157 *, 158 env: dict[str, str] | None = None, 159 cwd: str | Path | None = None, 160 log_path: Path | None = None, 161 ) -> subprocess.Popen[str]: 162 """Spawn a background process. Stdout/stderr go to log_path or DEVNULL.""" 163 merged_env = {**os.environ, **(env or {})} 164 if log_path: 165 log_file = open(log_path, "w") # noqa: SIM115 166 stdout = log_file 167 stderr = subprocess.STDOUT 168 else: 169 log_file = None # type: ignore[assignment] 170 stdout = subprocess.DEVNULL # type: ignore[assignment] 171 stderr = subprocess.DEVNULL # type: ignore[assignment] 172 proc = subprocess.Popen( 173 cmd, 174 env=merged_env, 175 cwd=cwd, 176 stdout=stdout, 177 stderr=stderr, 178 text=True, 179 ) 180 return proc 181 182 183 def wait_for_http(url: str, *, timeout: int = 30, label: str = "") -> bool: 184 """Poll an HTTP endpoint until it responds 2xx.""" 185 deadline = time.monotonic() + timeout 186 last_err = "" 187 while time.monotonic() < deadline: 188 try: 189 req = urllib.request.Request(url, method="GET") 190 with urllib.request.urlopen(req, timeout=5) as resp: 191 if resp.status < 400: 192 return True 193 except Exception as e: 194 last_err = str(e) 195 time.sleep(0.5) 196 warn(f"Timed out waiting for {label or url}: {last_err}") 197 return False 198 199 200 def http_get_json(url: str) -> Any: 201 """GET a URL, parse JSON.""" 202 req = urllib.request.Request(url, method="GET") 203 with urllib.request.urlopen(req, timeout=10) as resp: 204 return json.loads(resp.read().decode()) 205 206 207 # ── Workspace ──────────────────────────────────────────────────────────────── 208 209 210 @dataclass 211 class Workspace: 212 root: Path 213 
@dataclass
class Workspace:
    """Filesystem layout and mutable run-state for one smoke-test run.

    Everything lives under a single ``root`` directory so the entire run
    can be inspected or discarded as a unit. Derived paths are computed
    in ``__post_init__``; DID/RID fields are filled in by the phases.
    """

    root: Path

    # Derived layout, computed from `root` in __post_init__.
    auths_home: Path = field(init=False)
    node1_home: Path = field(init=False)
    node2_home: Path = field(init=False)
    node1_seed_path: Path = field(init=False)
    node2_seed_path: Path = field(init=False)
    project_dir: Path = field(init=False)
    project_node2_dir: Path = field(init=False)
    keychain_file: Path = field(init=False)
    metadata_file: Path = field(init=False)
    allowed_signers: Path = field(init=False)
    logs_dir: Path = field(init=False)

    # Populated during execution
    node1_did: str = ""
    node2_did: str = ""
    node1_nid: str = ""
    node2_nid: str = ""
    controller_did: str = ""
    project_rid: str = ""

    # Background processes
    _procs: list[subprocess.Popen] = field(default_factory=list, repr=False)

    def __post_init__(self) -> None:
        # Derive all working paths from root and pre-create the
        # directories that must exist before any phase runs.
        self.auths_home = self.root / ".auths"
        self.node1_home = self.root / "rad-node-1"
        self.node2_home = self.root / "rad-node-2"
        self.node1_seed_path = self.root / "node1.seed"
        self.node2_seed_path = self.root / "node2.seed"
        self.project_dir = self.root / "e2e-project"
        self.project_node2_dir = self.root / "e2e-project-node2"
        self.keychain_file = self.root / "keys.enc"
        self.metadata_file = self.root / "metadata.json"
        self.allowed_signers = self.root / "allowed_signers"
        self.logs_dir = self.root / "logs"

        for d in [
            self.auths_home,
            self.node1_home,
            self.node2_home,
            self.logs_dir,
        ]:
            d.mkdir(parents=True, exist_ok=True)

    def base_env(self) -> dict[str, str]:
        """Shared env vars for headless operation."""
        # Fixed passphrases keep the run non-interactive; the file-backed
        # keychain avoids touching any system keyring.
        return {
            "RUSTUP_TOOLCHAIN": "1.93",
            "AUTHS_KEYCHAIN_BACKEND": "file",
            "AUTHS_KEYCHAIN_FILE": str(self.keychain_file),
            "AUTHS_PASSPHRASE": "e2e-smoke-test",
            "RAD_PASSPHRASE": "e2e-rad",
            "GIT_AUTHOR_NAME": "Smoke Tester",
            "GIT_AUTHOR_EMAIL": "smoke@test.local",
            "GIT_COMMITTER_NAME": "Smoke Tester",
            "GIT_COMMITTER_EMAIL": "smoke@test.local",
        }

    def node_env(self, node: int) -> dict[str, str]:
        """Env for a specific Radicle node (1 or 2) via RAD_HOME."""
        home = self.node1_home if node == 1 else self.node2_home
        return {**self.base_env(), "RAD_HOME": str(home)}

    def auths_env(self) -> dict[str, str]:
        """Env for auths CLI."""
        return self.base_env()

    def register_proc(self, proc: subprocess.Popen) -> None:
        """Track a background process so cleanup() can terminate it."""
        self._procs.append(proc)

    def cleanup(self, rad: str | None = None) -> None:
        """Stop Radicle nodes and kill all background processes.

        Best-effort: node stops are attempted first (when a `rad` binary
        path is given), then tracked processes are terminated in reverse
        start order, escalating to kill() if terminate() doesn't stick.
        """
        if rad:
            for home in [self.node1_home, self.node2_home]:
                try:
                    subprocess.run(
                        [rad, "node", "stop"],
                        env={**os.environ, **self.base_env(), "RAD_HOME": str(home)},
                        capture_output=True, text=True, timeout=5,
                    )
                except Exception:
                    # Best-effort shutdown; the node may already be gone.
                    pass
        for proc in reversed(self._procs):
            if proc.poll() is None:
                try:
                    proc.terminate()
                    proc.wait(timeout=5)
                except Exception:
                    proc.kill()
        self._procs.clear()


# ── Binary resolution ────────────────────────────────────────────────────────


def find_auths_bin() -> Path:
    """Locate the compiled `auths` CLI, preferring the release build.

    Raises FileNotFoundError with a build hint when absent.
    """
    for profile in ["release", "debug"]:
        p = AUTHS_REPO / "target" / profile / "auths"
        if p.is_file() and os.access(p, os.X_OK):
            return p
    raise FileNotFoundError("auths binary not found. Run: cargo build --release --package auths-cli")


def find_auths_sign_bin() -> Path:
    """Locate the compiled `auths-sign` helper (git SSH-signing program)."""
    for profile in ["release", "debug"]:
        p = AUTHS_REPO / "target" / profile / "auths-sign"
        if p.is_file() and os.access(p, os.X_OK):
            return p
    raise FileNotFoundError("auths-sign binary not found. Run: cargo build --release --package auths-cli")
def find_httpd_bin() -> Path:
    """Locate radicle-httpd: local explorer build first, then $PATH."""
    # Prefer locally compiled httpd from explorer (has auths-radicle integration)
    local = HTTPD_CRATE / "target" / "debug" / "radicle-httpd"
    if local.is_file() and os.access(local, os.X_OK):
        return local
    local_release = HTTPD_CRATE / "target" / "release" / "radicle-httpd"
    if local_release.is_file() and os.access(local_release, os.X_OK):
        return local_release
    # Fall back to system radicle-httpd
    system = shutil.which("radicle-httpd")
    if system:
        return Path(system)
    raise FileNotFoundError(
        "radicle-httpd not found. Run:\n"
        f"  cd {HTTPD_CRATE} && cargo build"
    )


def find_rad_bin() -> Path:
    """Locate the `rad` CLI on $PATH."""
    p = shutil.which("rad")
    if p:
        return Path(p)
    raise FileNotFoundError("rad CLI not found. Install from https://radicle.xyz")


# ── Phase implementations ────────────────────────────────────────────────────


def phase_0_prerequisites(args: argparse.Namespace) -> dict[str, Path]:
    """Verify all tools exist. Optionally build.

    Returns a dict mapping logical names ("rad", "auths", "auths_sign",
    "httpd") to resolved binary paths, used by every later phase.
    """
    phase("Phase 0: Prerequisites & Build")

    rad = find_rad_bin()
    ok(f"rad CLI: {rad}")

    if not args.skip_build:
        info("Building auths CLI (release)...")
        run(
            ["cargo", "build", "--release", "--package", "auths-cli"],
            cwd=AUTHS_REPO,
            capture=False,
        )
        ok("auths CLI built")

        info("Building radicle-httpd (debug, with auths-radicle)...")
        run(
            ["cargo", "build"],
            cwd=HTTPD_CRATE,
            capture=False,
        )
        ok("radicle-httpd built")

    auths = find_auths_bin()
    auths_sign = find_auths_sign_bin()
    httpd = find_httpd_bin()

    ok(f"auths: {auths}")
    ok(f"auths-sign: {auths_sign}")
    ok(f"radicle-httpd: {httpd}")

    v = run([str(rad), "--version"]).stdout.strip()
    info(f"rad version: {v}")
    v = run([str(auths), "--version"]).stdout.strip()
    info(f"auths version: {v}")

    # The pinned toolchain must be present because base_env() forces
    # RUSTUP_TOOLCHAIN=1.93 for every cargo invocation.
    rustc_v = run(["rustc", "+1.93", "--version"], check=False)
    if rustc_v.returncode != 0:
        fail("Rust 1.93 toolchain not installed. Run: rustup install 1.93")
        raise RuntimeError("Missing Rust 1.93")
    info(f"rustc version: {rustc_v.stdout.strip()}")

    if not args.no_frontend:
        node = shutil.which("node")
        npm = shutil.which("npm")
        if not node or not npm:
            raise FileNotFoundError("Node.js and npm are required for the frontend")
        ok(f"node: {node}")
        ok(f"npm: {npm}")

    return {"rad": rad, "auths": auths, "auths_sign": auths_sign, "httpd": httpd}


def phase_1_setup_nodes(ws: Workspace, bins: dict[str, Path]) -> None:
    """Initialize two Radicle nodes with deterministic keys.

    Fills in ws.node{1,2}_did and ws.node{1,2}_nid.
    """
    phase("Phase 1: Set up two Radicle nodes")

    rad = str(bins["rad"])

    # Generate deterministic seeds
    # Fixed 32-byte seeds make node identities reproducible across runs.
    node1_seed_hex = "aa" * 32
    node2_seed_hex = "bb" * 32

    ws.node1_seed_path.write_bytes(bytes.fromhex(node1_seed_hex))
    ws.node2_seed_path.write_bytes(bytes.fromhex(node2_seed_hex))

    ok("Generated deterministic seeds")

    info("Initializing node 1...")
    run(
        [rad, "auth", "--alias", "device-1"],
        env={**ws.node_env(1), "RAD_KEYGEN_SEED": node1_seed_hex},
    )

    info("Initializing node 2...")
    run(
        [rad, "auth", "--alias", "device-2"],
        env={**ws.node_env(2), "RAD_KEYGEN_SEED": node2_seed_hex},
    )

    ws.node1_did = run(
        [rad, "self", "--did"], env=ws.node_env(1)
    ).stdout.strip()
    ws.node2_did = run(
        [rad, "self", "--did"], env=ws.node_env(2)
    ).stdout.strip()

    # NID = DID without the did:key: prefix; used for peer addresses.
    ws.node1_nid = ws.node1_did.removeprefix("did:key:")
    ws.node2_nid = ws.node2_did.removeprefix("did:key:")

    ok(f"Node 1 DID: {ws.node1_did}")
    ok(f"Node 2 DID: {ws.node2_did}")

    # z6Mk... is the multibase prefix for Ed25519 did:key identifiers.
    assert ws.node1_did.startswith("did:key:z6Mk"), f"Unexpected DID format: {ws.node1_did}"
    assert ws.node2_did.startswith("did:key:z6Mk"), f"Unexpected DID format: {ws.node2_did}"
def phase_2_create_identity(ws: Workspace, bins: dict[str, Path]) -> None:
    """Create a KERI identity using the auths CLI.

    Fills in ws.controller_did, parsed from the CLI output (with a
    fallback to `auths id show` when `id create` doesn't print it).
    """
    phase("Phase 2: Create Auths identity")

    auths = str(bins["auths"])

    ws.metadata_file.write_text(json.dumps({
        "xyz.radicle.project": {"name": "e2e-smoke-test"},
        "profile": {"name": "Smoke Test Identity"},
    }))

    info("Creating identity...")
    result = run(
        [
            auths, "--repo", str(ws.auths_home), "id", "create",
            "--metadata-file", str(ws.metadata_file),
            "--local-key-alias", "identity-key",
        ],
        env=ws.auths_env(),
        check=False,
    )
    output = result.stdout + result.stderr
    for line in output.strip().splitlines():
        print(f"    {line}")

    # Extract controller DID
    for line in output.splitlines():
        if "Controller DID:" in line:
            ws.controller_did = line.split("Controller DID:")[-1].strip()
            break

    if not ws.controller_did:
        info("Falling back to `auths id show`...")
        show = run(
            [auths, "--repo", str(ws.auths_home), "id", "show"],
            env=ws.auths_env(),
            check=False,
        )
        for line in (show.stdout + show.stderr).splitlines():
            if "Controller DID" in line:
                # NOTE: split(":")[-1] keeps only the text after the LAST
                # colon, so a full "did:keri:..." value is truncated here;
                # the loop below repairs it by scanning whole words.
                ws.controller_did = line.split(":")[-1].strip()
                # Reconstruct the full DID if truncated
                if not ws.controller_did.startswith("did:"):
                    parts = line.split()
                    for p in parts:
                        if p.startswith("did:keri:"):
                            ws.controller_did = p
                            break

    assert ws.controller_did, "Failed to extract controller DID"
    ok(f"Controller DID: {ws.controller_did}")


def phase_3_link_devices(ws: Workspace, bins: dict[str, Path]) -> None:
    """Import device keys and link both devices to the identity."""
    phase("Phase 3: Link devices to identity")

    auths = str(bins["auths"])
    env = ws.auths_env()

    # Same two steps per device: import the node's seed as a keychain
    # entry, then link the resulting device DID to the KERI identity.
    for i, (alias, seed_path, did, note) in enumerate([
        ("node1-key", ws.node1_seed_path, ws.node1_did, "Radicle Device 1"),
        ("node2-key", ws.node2_seed_path, ws.node2_did, "Radicle Device 2"),
    ], 1):
        info(f"Importing device {i} key...")
        run(
            [
                auths, "key", "import",
                "--alias", alias,
                "--seed-file", str(seed_path),
                "--controller-did", ws.controller_did,
            ],
            env=env,
        )
        ok(f"Device {i} key imported as '{alias}'")

        info(f"Linking device {i}...")
        run(
            [
                auths, "--repo", str(ws.auths_home), "device", "link",
                "--identity-key-alias", "identity-key",
                "--device-key-alias", alias,
                "--device-did", did,
                "--note", note,
            ],
            env=env,
        )
        ok(f"Device {i} linked: {did}")

    # Verify
    result = run(
        [auths, "--repo", str(ws.auths_home), "device", "list"],
        env=env,
    )
    device_list = result.stdout
    assert ws.node1_did in device_list, "Node 1 DID not in device list"
    assert ws.node2_did in device_list, "Node 2 DID not in device list"
    ok("Both devices appear in device list")

    info("Device list:")
    for line in device_list.strip().splitlines():
        print(f"    {line}")


def phase_4_create_project(ws: Workspace, bins: dict[str, Path]) -> None:
    """Create a new Radicle project (git repo) from node 1.

    Fills in ws.project_rid. A node is started only temporarily here;
    the persistent nodes are started in Phase 5.
    """
    phase("Phase 4: Create Radicle project")

    rad = str(bins["rad"])
    env = ws.node_env(1)

    # Create a git repo first
    ws.project_dir.mkdir(parents=True, exist_ok=True)
    run(["git", "init"], cwd=ws.project_dir, env=env)
    run(["git", "config", "user.name", "Smoke Tester"], cwd=ws.project_dir)
    run(["git", "config", "user.email", "smoke@test.local"], cwd=ws.project_dir)
    # Signing is configured (and enabled) later, in Phase 6.
    run(["git", "config", "commit.gpgsign", "false"], cwd=ws.project_dir)

    (ws.project_dir / "README.md").write_text(
        "# E2E Smoke Test Project\n\nCreated by the auths+radicle E2E smoke test.\n"
    )
    run(["git", "add", "."], cwd=ws.project_dir, env=env)
    run(["git", "commit", "-m", "Initial commit"], cwd=ws.project_dir, env=env)

    ok("Git repo initialized with initial commit")

    # Start node 1 temporarily to init the radicle project.
    # Use an ephemeral port (not NODE1_P2P_PORT) so the real Phase 5
    # node can bind cleanly after this one exits.
    info("Starting node 1 for project init...")
    run(
        [rad, "node", "start", "--", "--listen", "0.0.0.0:0"],
        env=env,
        check=False,
    )
    time.sleep(3)

    info("Initializing Radicle project...")
    result = run(
        [
            rad, "init",
            "--name", "e2e-smoke-test",
            "--description", "Auths+Radicle E2E smoke test project",
            "--public",
            "--no-confirm",
        ],
        cwd=ws.project_dir,
        env=env,
        check=False,
    )
    output = result.stdout + result.stderr
    for line in output.strip().splitlines():
        print(f"    {line}")

    # Extract RID
    for line in output.splitlines():
        for word in line.split():
            if word.startswith("rad:"):
                ws.project_rid = word.rstrip(".")
                break
        if ws.project_rid:
            break

    if not ws.project_rid:
        # Try rad inspect
        inspect = run([rad, "inspect"], cwd=ws.project_dir, env=env, check=False)
        for word in (inspect.stdout + inspect.stderr).split():
            if word.startswith("rad:"):
                ws.project_rid = word.rstrip(".")
                break

    assert ws.project_rid, "Failed to extract project RID"
    ok(f"Project RID: {ws.project_rid}")

    # Stop the temporary node and wait for full cleanup
    info("Stopping temporary node...")
    run([rad, "node", "stop"], env=env, check=False)
    # Wait for the daemon to fully exit (control socket removed)
    control_sock = ws.node1_home / "node" / "control.sock"
    for _ in range(10):
        if not control_sock.exists():
            break
        time.sleep(0.5)
    # Verify it's actually stopped
    for _ in range(5):
        r = run([rad, "node", "status"], env=env, check=False)
        if r.returncode != 0:
            break
        time.sleep(1)
    time.sleep(1)
def phase_5_start_node(ws: Workspace, bins: dict[str, Path]) -> None:
    """Start node 1 as persistent background process. Connect node 2.

    Raises RuntimeError (after dumping recent node logs) if either node
    fails to come up.
    """
    phase("Phase 5: Start Radicle nodes")

    rad = str(bins["rad"])

    info(f"Starting node 1 (P2P: {NODE1_P2P_PORT})...")
    run(
        [rad, "node", "start", "--", "--listen", f"0.0.0.0:{NODE1_P2P_PORT}"],
        env=ws.node_env(1),
        check=False,
    )
    time.sleep(3)

    info(f"Starting node 2 (P2P: {NODE2_P2P_PORT})...")
    run(
        [rad, "node", "start", "--", "--listen", f"0.0.0.0:{NODE2_P2P_PORT}"],
        env=ws.node_env(2),
        check=False,
    )
    time.sleep(3)

    # Verify nodes are running via rad node status
    for node_num, home in [(1, ws.node1_home), (2, ws.node2_home)]:
        node_env = ws.node_env(node_num)
        r = run([rad, "node", "status"], env=node_env, check=False)
        if r.returncode == 0:
            ok(f"Node {node_num} is running")
        else:
            # Node logs may be rotated (node.log, node.log.1, node.log.2)
            node_dir = home / "node"
            log_files = sorted(node_dir.glob("node.log*"), reverse=True) if node_dir.exists() else []
            if log_files:
                fail(f"Node {node_num} failed to start. Last log lines ({log_files[0].name}):")
                for line in log_files[0].read_text().strip().splitlines()[-5:]:
                    print(f"    {_c(DIM, line)}")
            else:
                fail(f"Node {node_num} failed to start (no log found)")
            raise RuntimeError(f"Node {node_num} not running")

    # Connect nodes to each other (bidirectional)
    info("Connecting node 2 to node 1...")
    run(
        [rad, "node", "connect", f"{ws.node1_nid}@127.0.0.1:{NODE1_P2P_PORT}",
         "--timeout", "10"],
        env=ws.node_env(2),
        check=False,
    )
    info("Connecting node 1 to node 2...")
    run(
        [rad, "node", "connect", f"{ws.node2_nid}@127.0.0.1:{NODE2_P2P_PORT}",
         "--timeout", "10"],
        env=ws.node_env(1),
        check=False,
    )
    time.sleep(2)
    ok("Nodes connected")
def phase_6_push_patches(ws: Workspace, bins: dict[str, Path]) -> dict[str, str]:
    """Push signed patches from both devices.

    Returns a dict with optional keys "device1"/"device2" mapping to the
    40-hex-char patch IDs scraped from `git push` output.
    """
    phase("Phase 6: Signed patches from both devices")

    rad = str(bins["rad"])
    auths = str(bins["auths"])
    auths_sign = str(bins["auths_sign"])
    env_base = ws.auths_env()

    # Export public keys for allowed_signers
    info("Exporting device public keys...")
    pub1 = run(
        [auths, "key", "export", "--alias", "node1-key",
         "--passphrase", "e2e-smoke-test", "--format", "pub"],
        env=env_base,
    ).stdout.strip()
    pub2 = run(
        [auths, "key", "export", "--alias", "node2-key",
         "--passphrase", "e2e-smoke-test", "--format", "pub"],
        env=env_base,
    ).stdout.strip()

    ws.allowed_signers.write_text(
        f"smoke@test.local {pub1}\nsmoke@test.local {pub2}\n"
    )
    ok("allowed_signers file created")

    # Configure git signing for the project
    # auths-sign acts as git's gpg.ssh.program, so commits are signed by
    # keychain aliases ("auths:<alias>") instead of on-disk SSH keys.
    for key, val in [
        ("gpg.format", "ssh"),
        ("gpg.ssh.program", auths_sign),
        ("gpg.ssh.allowedSignersFile", str(ws.allowed_signers)),
        ("commit.gpgsign", "true"),
    ]:
        run(["git", "config", key, val], cwd=ws.project_dir)

    patch_ids: dict[str, str] = {}

    # ── Device 1: signed commit + push patch ──────────────────────────
    info("Device 1: creating signed commit...")
    run(["git", "config", "user.signingKey", "auths:node1-key"], cwd=ws.project_dir)
    run(
        ["git", "checkout", "-b", "feature-device1"],
        cwd=ws.project_dir,
        env={**ws.node_env(1), **env_base},
        check=False,
    )
    (ws.project_dir / "device1.txt").write_text("Change from device 1\n")
    run(["git", "add", "device1.txt"], cwd=ws.project_dir, env={**ws.node_env(1), **env_base})
    run(
        ["git", "commit", "-m", "Signed commit from device 1"],
        cwd=ws.project_dir,
        env={**ws.node_env(1), **env_base},
    )
    ok("Device 1 signed commit created")

    info("Device 1: pushing patch...")
    push1 = run(
        ["git", "push", "rad", "HEAD:refs/patches"],
        cwd=ws.project_dir,
        env={**ws.node_env(1), **env_base},
        check=False,
    )
    push1_out = push1.stdout + push1.stderr
    for line in push1_out.strip().splitlines():
        print(f"    {line}")

    # Extract patch ID
    # Heuristic: the first 40-char hex token in the push output.
    for word in push1_out.split():
        if len(word) == 40 and all(c in "0123456789abcdef" for c in word):
            patch_ids["device1"] = word
            break

    if patch_ids.get("device1"):
        ok(f"Device 1 patch: {patch_ids['device1']}")
    else:
        warn("Could not extract device 1 patch ID from push output")

    # ── Device 2: signed commit + push patch via node 1 ─────────────
    #
    # Instead of cloning to a separate node and syncing (which is fragile
    # due to Radicle sigrefs divergence), device 2 creates its patch
    # directly in node 1's working copy using device 2's signing key.
    # This proves two different devices can contribute under a single
    # KERI identity without requiring inter-node gossip sync.
    info("Device 2: creating signed commit via node 1...")
    run(
        ["git", "checkout", "master"],
        cwd=ws.project_dir,
        env={**ws.node_env(1), **env_base},
        check=False,
    )
    run(["git", "config", "user.signingKey", "auths:node2-key"], cwd=ws.project_dir)
    run(["git", "config", "user.name", "Smoke Tester Device2"], cwd=ws.project_dir)
    run(
        ["git", "checkout", "-b", "feature-device2"],
        cwd=ws.project_dir,
        env={**ws.node_env(1), **env_base},
        check=False,
    )
    (ws.project_dir / "device2.txt").write_text("Change from device 2\n")
    run(
        ["git", "add", "device2.txt"],
        cwd=ws.project_dir,
        env={**ws.node_env(1), **env_base},
    )
    run(
        ["git", "commit", "-m", "Signed commit from device 2"],
        cwd=ws.project_dir,
        env={**ws.node_env(1), **env_base},
    )
    ok("Device 2 signed commit created")

    info("Device 2: pushing patch via node 1...")
    push2 = run(
        ["git", "push", "rad", "HEAD:refs/patches"],
        cwd=ws.project_dir,
        env={**ws.node_env(1), **env_base},
        check=False,
    )
    push2_out = push2.stdout + push2.stderr
    for line in push2_out.strip().splitlines():
        print(f"    {line}")

    for word in push2_out.split():
        if len(word) == 40 and all(c in "0123456789abcdef" for c in word):
            patch_ids["device2"] = word
            break

    if patch_ids.get("device2"):
        ok(f"Device 2 patch: {patch_ids['device2']}")
    else:
        warn("Could not extract device 2 patch ID from push output")

    # Restore git config for device 1
    run(["git", "config", "user.signingKey", "auths:node1-key"], cwd=ws.project_dir)
    run(["git", "config", "user.name", "Smoke Tester"], cwd=ws.project_dir)

    return patch_ids
def phase_7_start_httpd(ws: Workspace, bins: dict[str, Path]) -> None:
    """Start radicle-httpd serving node 1's storage.

    The process is registered on the workspace for cleanup; raises
    RuntimeError if the API does not come up within 15s.
    """
    phase("Phase 7: Start radicle-httpd")

    httpd = str(bins["httpd"])

    info(f"Starting radicle-httpd on port {HTTPD_PORT}...")
    # AUTHS_HOME points the httpd's auths-radicle integration at the
    # identity store created in Phase 2.
    httpd_env = {**ws.node_env(1), "AUTHS_HOME": str(ws.auths_home)}
    proc = spawn(
        [httpd, "--listen", f"0.0.0.0:{HTTPD_PORT}"],
        env=httpd_env,
        log_path=ws.logs_dir / "httpd.log",
    )
    ws.register_proc(proc)

    url = f"http://127.0.0.1:{HTTPD_PORT}/api/v1"
    if wait_for_http(url, timeout=15, label="radicle-httpd"):
        ok(f"radicle-httpd is ready at http://127.0.0.1:{HTTPD_PORT}")
    else:
        fail("radicle-httpd failed to start. Check logs/httpd.log")
        raise RuntimeError("httpd not ready")


def phase_8_start_frontend(ws: Workspace, args: argparse.Namespace) -> None:
    """Build and serve the radicle-explorer frontend.

    Skipped (with a warning) when --no-frontend is set or the explorer
    checkout is missing. A failed frontend start only warns; it does not
    abort the run.
    """
    phase("Phase 8: Start radicle-explorer frontend")

    if args.no_frontend:
        warn("--no-frontend specified, skipping frontend")
        return

    if not EXPLORER_REPO.exists():
        warn(f"radicle-explorer not found at {EXPLORER_REPO}, skipping frontend")
        return

    if not args.skip_build:
        info("Building @auths/verifier WASM module...")
        wasm_out = VERIFIER_TS / "wasm"
        run(
            [
                "wasm-pack", "build",
                "--target", "bundler",
                "--no-default-features",
                "--features", "wasm",
            ],
            cwd=VERIFIER_CRATE,
            capture=False,
            timeout=300,
        )
        # Copy wasm-pack output from default pkg/ to verifier-ts/wasm/
        pkg_dir = VERIFIER_CRATE / "pkg"
        if pkg_dir.exists():
            if wasm_out.exists():
                shutil.rmtree(wasm_out)
            shutil.copytree(pkg_dir, wasm_out)
            # npm respects .gitignore when packing file: deps; .npmignore overrides it
            (wasm_out / ".npmignore").write_text("# Override .gitignore for npm\n.gitignore\n")
        ok("WASM module built")

        info("Installing @auths/verifier TypeScript dependencies...")
        run(["npm", "install"], cwd=VERIFIER_TS, capture=False, timeout=120)
        ok("verifier-ts dependencies installed")

        info("Building @auths/verifier TypeScript...")
        run(["npm", "run", "build:ts"], cwd=VERIFIER_TS, capture=False, timeout=120)
        ok("@auths/verifier built")

        info("Installing npm dependencies (fresh @auths/verifier)...")
        # Remove cached copy so npm re-packs the file: dependency with wasm files
        cached = EXPLORER_REPO / "node_modules" / "@auths" / "verifier"
        if cached.is_symlink():
            cached.unlink()
        elif cached.exists():
            shutil.rmtree(cached)
        run(["npm", "install"], cwd=EXPLORER_REPO, capture=False, timeout=300)
        ok("npm install complete")

        info("Building frontend...")
        run(["npm", "run", "build"], cwd=EXPLORER_REPO, capture=False, timeout=300)
        ok("Frontend built")

    info(f"Serving frontend on port {FRONTEND_PORT}...")
    proc = spawn(
        ["npm", "run", "serve", "--", "--strictPort", "--port", str(FRONTEND_PORT)],
        cwd=EXPLORER_REPO,
        log_path=ws.logs_dir / "frontend.log",
        env={
            **os.environ,
            "DEFAULT_LOCAL_HTTPD_PORT": str(HTTPD_PORT),
            "DEFAULT_HTTPD_SCHEME": "http",
        },
    )
    ws.register_proc(proc)

    url = f"http://localhost:{FRONTEND_PORT}"
    if wait_for_http(url, timeout=30, label="frontend"):
        ok(f"Frontend is ready at {url}")
    else:
        warn("Frontend failed to start. Check logs/frontend.log")
def phase_9_verify_api(ws: Workspace, patch_ids: dict[str, str]) -> None:
    """Run HTTP assertions against the API.

    Each endpoint is checked independently; hard assertion failures on
    the delegates payload abort the phase, while missing/optional
    endpoints only warn.
    """
    phase("Phase 9: Verify HTTP API")

    base = f"http://127.0.0.1:{HTTPD_PORT}/api/v1"

    # ── Node info ─────────────────────────────────────────────────────
    info("Checking /api/v1 ...")
    try:
        root = http_get_json(base)
        ok(f"API root responded: {root.get('service', 'unknown')}")
    except Exception as e:
        fail(f"API root failed: {e}")
        return

    # ── Delegates endpoint ────────────────────────────────────────────
    info(f"Checking delegates endpoint for {ws.controller_did} ...")
    try:
        user = http_get_json(f"{base}/delegates/{ws.controller_did}")
        info(f"Response: {json.dumps(user, indent=2)[:500]}")

        assert user.get("isKeri") is True, f"Expected isKeri=true, got {user.get('isKeri')}"
        ok("isKeri: true")

        controller_did = user.get("controllerDid")
        assert controller_did, "controllerDid is empty"
        ok(f"controllerDid: {controller_did}")

        devices = user.get("devices", [])
        assert len(devices) >= 2, f"Expected >= 2 devices, got {len(devices)}"
        ok(f"devices: {len(devices)} linked")

        assert user.get("isAbandoned") is False, "Identity should not be abandoned"
        ok("isAbandoned: false")

    except urllib.error.HTTPError as e:
        if e.code == 404:
            # A stock radicle-httpd (without the auths integration) has
            # no delegates route; treat that as a soft skip.
            warn("Delegates endpoint returned 404. Modified httpd may not be running.")
            warn("Skipping remaining API assertions.")
            return
        raise
    except AssertionError as e:
        fail(str(e))
        return

    # ── KEL endpoint ──────────────────────────────────────────────────
    info("Checking KEL endpoint...")
    try:
        kel = http_get_json(f"{base}/identity/{ws.controller_did}/kel")
        assert isinstance(kel, list), f"Expected array, got {type(kel)}"
        assert len(kel) > 0, "KEL is empty"
        ok(f"KEL: {len(kel)} events")
    except urllib.error.HTTPError as e:
        warn(f"KEL endpoint failed: {e.code}")
    except AssertionError as e:
        fail(str(e))

    # ── Attestations endpoint ─────────────────────────────────────────
    info("Checking attestations endpoint...")
    try:
        atts = http_get_json(f"{base}/identity/{ws.controller_did}/attestations")
        assert isinstance(atts, list), f"Expected array, got {type(atts)}"
        assert len(atts) > 0, "Attestations empty"
        ok(f"Attestations: {len(atts)} returned")
    except urllib.error.HTTPError as e:
        warn(f"Attestations endpoint failed: {e.code}")
    except AssertionError as e:
        fail(str(e))

    # ── Repos endpoint ────────────────────────────────────────────────
    info("Checking repos...")
    try:
        repos = http_get_json(f"{base}/repos")
        assert isinstance(repos, list), f"Expected array, got {type(repos)}"
        ok(f"Repos: {len(repos)} listed")
        found = any(ws.project_rid in str(r) for r in repos)
        if found:
            ok(f"Project {ws.project_rid} found in repo list")
        else:
            warn(f"Project {ws.project_rid} not in repo list (may need sync)")
    except Exception as e:
        warn(f"Repos check failed: {e}")

    # ── Patches ───────────────────────────────────────────────────────
    if ws.project_rid:
        # NOTE(review): despite the name, no URL-encoding is applied here —
        # presumably RIDs are URL-safe as-is; confirm against httpd routing.
        rid_encoded = ws.project_rid
        info(f"Checking patches for {rid_encoded}...")
        try:
            patches = http_get_json(f"{base}/repos/{rid_encoded}/patches")
            assert isinstance(patches, list), f"Expected array, got {type(patches)}"
            ok(f"Patches: {len(patches)} found")
            for p in patches:
                pid = p.get("id", "?")[:12]
                state = p.get("state", {})
                author_did = p.get("author", {}).get("id", "?")
                print(f"    Patch {pid}... state={state} author={author_did}")
        except Exception as e:
            warn(f"Patches check failed: {e}")


def phase_10_summary(
    ws: Workspace,
    patch_ids: dict[str, str],
    args: argparse.Namespace,
) -> None:
    """Print final summary with URLs for manual inspection."""
    phase("Summary")

    print(_c(CYAN, "  Identities:"))
    print(f"    Controller DID:  {_c(BOLD, ws.controller_did)}")
    print(f"    Device 1 DID:    {ws.node1_did}")
    print(f"    Device 2 DID:    {ws.node2_did}")
    print(f"    Project RID:     {ws.project_rid}")
    print()

    print(_c(CYAN, "  Services:"))
    print(f"    radicle-httpd:   http://127.0.0.1:{HTTPD_PORT}/api/v1")
    if not args.no_frontend:
        print(f"    Frontend:        http://localhost:{FRONTEND_PORT}")
    print()

    print(_c(CYAN, "  URLs to verify manually:"))
    httpd_url = f"http://127.0.0.1:{HTTPD_PORT}/api/v1"
    print(f"    API - Delegates:    {httpd_url}/delegates/{ws.controller_did}")
    print(f"    API - KEL:          {httpd_url}/identity/{ws.controller_did}/kel")
    print(f"    API - Attestations: {httpd_url}/identity/{ws.controller_did}/attestations")
    if ws.project_rid:
        print(f"    API - Patches:      {httpd_url}/repos/{ws.project_rid}/patches")
    print()

    if not args.no_frontend:
        fe = f"http://localhost:{FRONTEND_PORT}"
        print(_c(CYAN, "  Frontend URLs:"))
        print(f"    Node view:    {fe}/nodes/127.0.0.1:{HTTPD_PORT}")
        if ws.project_rid:
            print(f"    Project:      {fe}/nodes/127.0.0.1:{HTTPD_PORT}/{ws.project_rid}")
        print(f"    User profile: {fe}/nodes/127.0.0.1:{HTTPD_PORT}/users/{ws.controller_did}")
        if ws.node1_did:
            print(f"    Device 1:     {fe}/nodes/127.0.0.1:{HTTPD_PORT}/devices/{ws.node1_did}")
        if ws.node2_did:
            print(f"    Device 2:     {fe}/nodes/127.0.0.1:{HTTPD_PORT}/devices/{ws.node2_did}")
        print()

    print(_c(CYAN, "  Logs:"))
    print(f"    {ws.logs_dir}")
    print()

    if args.open_browser and not args.no_frontend:
        # Imported lazily: only needed on this optional path.
        import webbrowser

        url = f"http://localhost:{FRONTEND_PORT}/nodes/127.0.0.1:{HTTPD_PORT}/users/{ws.controller_did}"
        info(f"Opening browser: {url}")
        webbrowser.open(url)
{fe}/nodes/127.0.0.1:{HTTPD_PORT}/devices/{ws.node1_did}") 1082 if ws.node2_did: 1083 print(f" Device 2: {fe}/nodes/127.0.0.1:{HTTPD_PORT}/devices/{ws.node2_did}") 1084 print() 1085 1086 print(_c(CYAN, " Logs:")) 1087 print(f" {ws.logs_dir}") 1088 print() 1089 1090 if args.open_browser and not args.no_frontend: 1091 import webbrowser 1092 1093 url = f"http://localhost:{FRONTEND_PORT}/nodes/127.0.0.1:{HTTPD_PORT}/users/{ws.controller_did}" 1094 info(f"Opening browser: {url}") 1095 webbrowser.open(url) 1096 1097 1098 # ── Main ───────────────────────────────────────────────────────────────────── 1099 1100 1101 def parse_args() -> argparse.Namespace: 1102 parser = argparse.ArgumentParser( 1103 description="Radicle + Auths Full-Stack E2E Smoke Test", 1104 formatter_class=argparse.RawDescriptionHelpFormatter, 1105 epilog=textwrap.dedent("""\ 1106 This script orchestrates the full local stack: auths CLI, two Radicle 1107 nodes, radicle-httpd, and the radicle-explorer frontend. Both devices 1108 push signed patches, and the resulting state is verified via HTTP API 1109 and available for manual browser inspection. 
1110 """), 1111 ) 1112 parser.add_argument( 1113 "--skip-build", action="store_true", 1114 help="Skip cargo/npm builds (use existing binaries)", 1115 ) 1116 parser.add_argument( 1117 "--keep-alive", action="store_true", 1118 help="Keep all services running after tests for manual inspection", 1119 ) 1120 parser.add_argument( 1121 "--no-frontend", action="store_true", 1122 help="Skip building and serving the frontend", 1123 ) 1124 parser.add_argument( 1125 "--open-browser", action="store_true", 1126 help="Open browser to the user profile page at the end", 1127 ) 1128 parser.add_argument( 1129 "--workspace", type=Path, default=None, 1130 help="Use a specific workspace directory instead of a tmpdir", 1131 ) 1132 return parser.parse_args() 1133 1134 1135 def main() -> None: 1136 args = parse_args() 1137 1138 os.environ.setdefault("RUSTUP_TOOLCHAIN", "1.93") 1139 1140 print() 1141 print(_c(CYAN, " Radicle + Auths Full-Stack E2E Smoke Test")) 1142 print(_c(DIM, " ─────────────────────────────────────────")) 1143 print() 1144 1145 if args.workspace: 1146 ws_root = args.workspace 1147 ws_root.mkdir(parents=True, exist_ok=True) 1148 else: 1149 # Use /tmp directly (not the macOS per-user /var/folders/.../T/) to keep 1150 # paths short. Radicle's control socket path must fit within the Unix 1151 # SUN_LEN limit (104 chars on macOS). The default tempfile.mkdtemp() 1152 # uses a long per-user path that pushes the socket path over the limit. 
1153 short_tmp = Path("/tmp/ae2e") 1154 if short_tmp.exists(): 1155 shutil.rmtree(short_tmp) 1156 short_tmp.mkdir(parents=True) 1157 ws_root = short_tmp 1158 info(f"Workspace: {ws_root}") 1159 1160 ws = Workspace(root=ws_root) 1161 rad_bin: str | None = None 1162 1163 def cleanup() -> None: 1164 print() 1165 info("Cleaning up...") 1166 ws.cleanup(rad=rad_bin) 1167 if not args.workspace and not args.keep_alive: 1168 shutil.rmtree(ws_root, ignore_errors=True) 1169 info("Workspace removed") 1170 1171 atexit.register(cleanup) 1172 1173 def signal_handler(sig: int, frame: Any) -> None: 1174 print() 1175 warn("Interrupted. Cleaning up...") 1176 cleanup() 1177 sys.exit(1) 1178 1179 signal.signal(signal.SIGINT, signal_handler) 1180 signal.signal(signal.SIGTERM, signal_handler) 1181 1182 try: 1183 # Phase 0: Prerequisites 1184 bins = phase_0_prerequisites(args) 1185 rad_bin = str(bins["rad"]) 1186 1187 # Phase 1: Two radicle nodes 1188 phase_1_setup_nodes(ws, bins) 1189 1190 # Phase 2: Create KERI identity 1191 phase_2_create_identity(ws, bins) 1192 1193 # Phase 3: Link both devices 1194 phase_3_link_devices(ws, bins) 1195 1196 # Phase 4: Create project 1197 phase_4_create_project(ws, bins) 1198 1199 # Phase 5: Start nodes 1200 phase_5_start_node(ws, bins) 1201 1202 # Phase 6: Push patches from both devices 1203 patch_ids = phase_6_push_patches(ws, bins) 1204 1205 # Phase 7: Start httpd 1206 phase_7_start_httpd(ws, bins) 1207 1208 # Phase 8: Start frontend 1209 phase_8_start_frontend(ws, args) 1210 1211 # Phase 9: Verify API 1212 phase_9_verify_api(ws, patch_ids) 1213 1214 # Phase 10: Summary 1215 phase_10_summary(ws, patch_ids, args) 1216 1217 if args.keep_alive: 1218 print() 1219 print(_c(GREEN, _c(BOLD, " All services running. 
Press Ctrl+C to stop."))) 1220 print() 1221 try: 1222 while True: 1223 time.sleep(1) 1224 except KeyboardInterrupt: 1225 pass 1226 1227 except Exception as e: 1228 print() 1229 fail(f"E2E test failed: {e}") 1230 print() 1231 print(_c(DIM, f" Workspace preserved at: {ws_root}")) 1232 print(_c(DIM, f" Logs at: {ws.logs_dir}")) 1233 sys.exit(1) 1234 1235 1236 if __name__ == "__main__": 1237 main()