#!/usr/bin/env python3
"""
Mnemonic Workspaces Bridge - FastAPI REST Service

A REST-based service for syncing Mnemonic workspaces via Syncthing.
Run as a background service and configure both Chrome and Firefox
extensions to connect to the same endpoint.

Usage:
    python workspaces-bridge.py [--host HOST] [--port PORT] [--config-dir DIR]

Example:
    python workspaces-bridge.py --host 127.0.0.1 --port 8765
"""

import argparse
import json
import logging
import logging.handlers
import os
import re
import shutil
import threading
import time
import uuid
from contextlib import asynccontextmanager
from datetime import datetime, timezone, timedelta
from pathlib import Path
from typing import Any, Dict, List, Optional

from fastapi import FastAPI, HTTPException
from fastapi.responses import RedirectResponse
from fastapi.middleware.cors import CORSMiddleware
from fastapi.requests import Request
from fastapi.templating import Jinja2Templates
from pydantic import BaseModel, Field
from urllib.parse import urlparse

try:
    from todoist_api_python.api_async import TodoistAPIAsync
    TODOIST_SDK_AVAILABLE = True
except ImportError:
    TODOIST_SDK_AVAILABLE = False

# =============================================================================
# Logging Setup
# =============================================================================

logger = logging.getLogger("mnemonic-bridge")
logger.setLevel(logging.INFO)

# Console handler (replaces print statements)
_console_handler = logging.StreamHandler()
_console_handler.setFormatter(logging.Formatter("[%(name)s] %(message)s"))
logger.addHandler(_console_handler)


def setup_file_logging(config_dir: Path) -> None:
    """Add rotating file handler once config_dir is known."""
    log_path = config_dir / "mnemonic-bridge.log"
    file_handler = logging.handlers.RotatingFileHandler(
        log_path, maxBytes=2 * 1024 * 1024, backupCount=3, encoding="utf-8"
    )
    file_handler.setFormatter(
        logging.Formatter("%(asctime)s [%(levelname)s] %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
    )
    logger.addHandler(file_handler)
    logger.info("File logging enabled: %s", log_path)


def _summarize_workspaces(data: Dict[str, Any]) -> str:
    """Build a human-readable summary of per-browser workspace counts and tab counts."""
    parts: List[str] = []
    browsers = data.get("browsers", {})
    for browser_key, browser_data in browsers.items():
        if not isinstance(browser_data, dict):
            continue
        workspaces = browser_data.get("workspaces", [])
        ws_count = len(workspaces)
        tab_parts = []
        for ws in workspaces:
            name = ws.get("name", "?")
            tabs = ws.get("tabs")
            tab_count = len(tabs) if isinstance(tabs, list) else "-"
            tab_parts.append(f"{name}={tab_count}")
        tabs_str = ", ".join(tab_parts) if tab_parts else "none"
        parts.append(f"{browser_key}: {ws_count} workspaces, tabs: [{tabs_str}]")
    return " | ".join(parts) if parts else "empty"


# =============================================================================
# Configuration
# =============================================================================

API_VERSION = "2.0.0"
SCHEMA_VERSION = 1
MIN_CLIENT_VERSION = "2.0.0"

DEFAULT_HOST = "127.0.0.1"
DEFAULT_PORT = 8765
DEFAULT_CONFIG_DIR = Path.home() / "Syncthing" / "MnemonicWorkspaces"

CONFIG_FILE = "workspaces.json"
CREDENTIALS_FILE = "credentials.json"
LOCK_FILE = ".workspaces.lock"
CREDENTIALS_LOCK_FILE = ".credentials.lock"
BACKUP_SUFFIX = ".bak"
BACKUP_DIR = "backups"
BACKUP_RETENTION_DAYS = 30
TODOIST_MASTER_PROJECT_NAME = "Mnemonic"
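

# Illustrative document shape for workspaces.json (a sketch, not used by the
# service). Field names are taken from the handlers below (browsers, workspaces,
# tabs, metadata.lastModified, parentId/children); the values are made-up
# example data.
_EXAMPLE_WORKSPACES_DOC = {
    "version": SCHEMA_VERSION,
    "metadata": {"lastModified": "2026-01-22T14:30:00+00:00"},
    "browsers": {
        "firefox": {
            "workspaces": [
                {
                    "id": "ws-1",
                    "name": "Research",
                    "parentId": None,
                    "children": [],
                    "tabs": [
                        {"url": "https://example.com", "title": "Example", "pinned": False},
                    ],
                },
            ],
        },
    },
}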


# =============================================================================
# Pydantic Models - Request/Response Schemas
# =============================================================================


class VersionInfo(BaseModel):
    """API version information for compatibility checks."""
    api_version: str = Field(..., description="Current API version")
    schema_version: int = Field(..., description="Data schema version")
    min_client_version: str = Field(..., description="Minimum supported client version")


class HealthResponse(BaseModel):
    """Health check response."""
    status: str = Field(default="ok", description="Service status")
    version: VersionInfo
    config_dir: str = Field(..., description="Configuration directory path")
    timestamp: str = Field(..., description="Current server timestamp (ISO format)")


class FileInfoResponse(BaseModel):
    """File metadata response."""
    success: bool = True
    exists: bool = Field(..., description="Whether the file exists")
    path: str = Field(..., description="Full path to the file")
    mtime: Optional[float] = Field(None, description="File modification time (Unix timestamp)")
    mtime_iso: Optional[str] = Field(None, description="File modification time (ISO format)")
    size: Optional[int] = Field(None, description="File size in bytes")


class ReadResponse(BaseModel):
    """Read operation response."""
    success: bool = True
    exists: bool = Field(..., description="Whether the file exists")
    data: Optional[Dict[str, Any]] = Field(None, description="File contents as JSON")
    mtime: Optional[float] = Field(None, description="File modification time")


class WriteRequest(BaseModel):
    """Write operation request."""
    data: Dict[str, Any] = Field(..., description="Data to write (JSON object)")


class WriteResponse(BaseModel):
    """Write operation response."""
    success: bool = True
    mtime: float = Field(..., description="New file modification time")
    path: str = Field(..., description="Path to written file")


class DeleteResponse(BaseModel):
    """Delete operation response."""
    success: bool = True
    message: str = Field(default="File deleted", description="Operation result message")


class RestoreResponse(BaseModel):
    """Restore from backup response."""
    success: bool = True
    mtime: float = Field(..., description="Restored file modification time")
    message: str = Field(default="Backup restored", description="Operation result message")


class ErrorResponse(BaseModel):
    """Error response."""
    success: bool = False
    error: str = Field(..., description="Error message")
    code: str = Field(..., description="Error code for programmatic handling")


# =============================================================================
# Versioned Backup Models
# =============================================================================


class BackupInfo(BaseModel):
    """Information about a single backup."""
    filename: str = Field(..., description="Backup filename")
    timestamp: str = Field(..., description="Backup timestamp (ISO format)")
    timestamp_unix: float = Field(..., description="Backup timestamp (Unix seconds)")
    size: int = Field(..., description="File size in bytes")
    age_days: float = Field(..., description="Age of backup in days")
    workspace_count: Optional[int] = Field(None, description="Number of workspaces in backup")


class BackupListResponse(BaseModel):
    """Response for listing backups."""
    success: bool = True
    backups: list[BackupInfo] = Field(default_factory=list, description="List of backups")
    total_count: int = Field(..., description="Total number of backups")
    total_size: int = Field(..., description="Total size of all backups in bytes")


class BackupPreviewResponse(BaseModel):
    """Response for previewing a backup."""
    success: bool = True
    filename: str = Field(..., description="Backup filename")
    timestamp: str = Field(..., description="Backup timestamp (ISO format)")
    data: Dict[str, Any] = Field(..., description="Backup data contents")


class RestoreFromBackupRequest(BaseModel):
    """Request for restoring from a specific backup."""
    filename: str = Field(..., description="Backup filename to restore from")


class RestoreFromBackupResponse(BaseModel):
    """Response for restoring from a specific backup."""
    success: bool = True
    restored_from: str = Field(..., description="Filename restored from")
    mtime: float = Field(..., description="New file modification time")
    message: str = Field(default="Backup restored", description="Operation result message")


class PruneBackupsResponse(BaseModel):
    """Response for pruning old backups."""
    success: bool = True
    deleted_count: int = Field(..., description="Number of backups deleted")
    remaining_count: int = Field(..., description="Number of backups remaining")
    message: str = Field(default="Backups pruned", description="Operation result message")


# =============================================================================
# File Lock and CacheIO Implementation
# =============================================================================

import fcntl  # POSIX-only; this service requires a Unix-like host
import tempfile
seconds)") 192 size: int = Field(..., description="File size in bytes") 193 age_days: float = Field(..., description="Age of backup in days") 194 workspace_count: Optional[int] = Field(None, description="Number of workspaces in backup") 195 196 197 class BackupListResponse(BaseModel): 198 """Response for listing backups.""" 199 success: bool = True 200 backups: list[BackupInfo] = Field(default_factory=list, description="List of backups") 201 total_count: int = Field(..., description="Total number of backups") 202 total_size: int = Field(..., description="Total size of all backups in bytes") 203 204 205 class BackupPreviewResponse(BaseModel): 206 """Response for previewing a backup.""" 207 success: bool = True 208 filename: str = Field(..., description="Backup filename") 209 timestamp: str = Field(..., description="Backup timestamp (ISO format)") 210 data: Dict[str, Any] = Field(..., description="Backup data contents") 211 212 213 class RestoreFromBackupRequest(BaseModel): 214 """Request for restoring from a specific backup.""" 215 filename: str = Field(..., description="Backup filename to restore from") 216 217 218 class RestoreFromBackupResponse(BaseModel): 219 """Response for restoring from a specific backup.""" 220 success: bool = True 221 restored_from: str = Field(..., description="Filename restored from") 222 mtime: float = Field(..., description="New file modification time") 223 message: str = Field(default="Backup restored", description="Operation result message") 224 225 226 class PruneBackupsResponse(BaseModel): 227 """Response for pruning old backups.""" 228 success: bool = True 229 deleted_count: int = Field(..., description="Number of backups deleted") 230 remaining_count: int = Field(..., description="Number of backups remaining") 231 message: str = Field(default="Backups pruned", description="Operation result message") 232 233 234 # ============================================================================= 235 # File Lock and CacheIO Implementation 236 # ============================================================================= 237 238 import fcntl 239 import tempfile 240 import threading 241 from contextlib import contextmanager 242 243 244 class FileLock: 245 """ 246 OS-level file locking using fcntl.flock(). 


class CacheIO:
    """
    In-memory cache with file persistence.

    Features:
    - Fast reads from memory (no file I/O for API requests)
    - Detects external changes via mtime (Syncthing updates)
    - Atomic writes with exclusive file locking
    - Thread-safe operations

    Flow:
    1. On read: Check file mtime. If changed externally, reload cache.
    2. On write: Update cache, then atomically flush to file.
    3. External changes (Syncthing) detected on next read via mtime.
    """

    def __init__(self, data_path: Path, lock_path: Path):
        self.data_path = data_path
        self.lock_path = lock_path
        self._cache: Optional[Dict[str, Any]] = None
        self._cache_mtime: float = 0.0
        self._lock = threading.Lock()  # In-process lock for cache access

    def _get_file_mtime(self) -> float:
        """Get file modification time, or 0 if file doesn't exist."""
        try:
            return self.data_path.stat().st_mtime
        except (FileNotFoundError, OSError):
            return 0.0

    def _load_from_file(self) -> Optional[Dict[str, Any]]:
        """Load data from file with exclusive lock."""
        if not self.data_path.exists():
            return None

        file_lock = FileLock(self.lock_path)
        try:
            with file_lock:
                content = self.data_path.read_text(encoding="utf-8")
                data = json.loads(content)
                self._cache_mtime = self._get_file_mtime()
                return data
        except (json.JSONDecodeError, IOError, OSError, TimeoutError) as e:
            logger.error("CacheIO: Failed to load file: %s", e)
            return None

    def _flush_to_file(self, data: Dict[str, Any]) -> float:
        """
        Atomically write data to file.
        Returns new mtime on success, raises on failure.
        """
        file_lock = FileLock(self.lock_path)
        with file_lock:
            # Atomic write: temp file then rename
            temp_fd, temp_path_str = tempfile.mkstemp(
                dir=self.data_path.parent,
                prefix=".workspaces_",
                suffix=".tmp"
            )
            temp_path = Path(temp_path_str)
            try:
                content = json.dumps(data, indent=2, ensure_ascii=False)
                os.write(temp_fd, content.encode("utf-8"))
                os.close(temp_fd)
                temp_fd = None
                # Atomic rename
                os.rename(temp_path, self.data_path)
                return self._get_file_mtime()
            except BaseException:
                if temp_fd is not None:
                    os.close(temp_fd)
                if temp_path.exists():
                    temp_path.unlink()
                raise

    def read(self) -> tuple[Optional[Dict[str, Any]], float]:
        """
        Read data, checking for external changes first.

        Returns:
            (data, mtime) tuple. data is None if file doesn't exist.
        """
        with self._lock:
            current_mtime = self._get_file_mtime()

            # Check if file was modified externally (e.g., by Syncthing)
            if current_mtime > self._cache_mtime or self._cache is None:
                if current_mtime > self._cache_mtime and self._cache is not None:
                    logger.info("CacheIO: External change detected, reloading cache")
                self._cache = self._load_from_file()
                self._cache_mtime = current_mtime

            return self._cache, self._cache_mtime

    def write(self, data: Dict[str, Any]) -> float:
        """
        Write data to cache and flush to file.

        Returns:
            New file mtime on success.
        """
        with self._lock:
            # Flush to file first (atomic)
            new_mtime = self._flush_to_file(data)
            # Update cache
            self._cache = data
            self._cache_mtime = new_mtime
            return new_mtime

    def invalidate(self) -> None:
        """Force cache reload on next read."""
        with self._lock:
            self._cache_mtime = 0.0

    def exists(self) -> bool:
        """Check if data file exists."""
        return self.data_path.exists()

    def get_mtime(self) -> float:
        """Get current file mtime (for change detection by clients)."""
        return self._get_file_mtime()
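

# Illustrative CacheIO flow (hypothetical paths, not called by the service):
# reads are served from memory; an external writer such as Syncthing bumps the
# file mtime, and the next read() transparently reloads from disk.
def _example_cacheio_usage() -> None:
    cio = CacheIO(Path("/tmp/example.json"), Path("/tmp/example.lock"))
    mtime = cio.write({"hello": "world"})  # atomic flush; cache updated
    data, cached_mtime = cio.read()        # served from memory, no file read
    assert data == {"hello": "world"} and cached_mtime == mtime
    cio.invalidate()                       # force a reload on the next read
    data, _ = cio.read()                   # re-reads the file under the lock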
353 """ 354 file_lock = FileLock(self.lock_path) 355 with file_lock: 356 # Atomic write: temp file then rename 357 temp_fd, temp_path_str = tempfile.mkstemp( 358 dir=self.data_path.parent, 359 prefix=".workspaces_", 360 suffix=".tmp" 361 ) 362 temp_path = Path(temp_path_str) 363 try: 364 content = json.dumps(data, indent=2, ensure_ascii=False) 365 os.write(temp_fd, content.encode("utf-8")) 366 os.close(temp_fd) 367 temp_fd = None 368 # Atomic rename 369 os.rename(temp_path, self.data_path) 370 return self._get_file_mtime() 371 except: 372 if temp_fd is not None: 373 os.close(temp_fd) 374 if temp_path.exists(): 375 temp_path.unlink() 376 raise 377 378 def read(self) -> tuple[Optional[Dict[str, Any]], float]: 379 """ 380 Read data, checking for external changes first. 381 382 Returns: 383 (data, mtime) tuple. data is None if file doesn't exist. 384 """ 385 with self._lock: 386 current_mtime = self._get_file_mtime() 387 388 # Check if file was modified externally (e.g., by Syncthing) 389 if current_mtime > self._cache_mtime or self._cache is None: 390 if current_mtime > self._cache_mtime and self._cache is not None: 391 logger.info("CacheIO: External change detected, reloading cache") 392 self._cache = self._load_from_file() 393 self._cache_mtime = current_mtime 394 395 return self._cache, self._cache_mtime 396 397 def write(self, data: Dict[str, Any]) -> float: 398 """ 399 Write data to cache and flush to file. 400 401 Returns: 402 New file mtime on success. 403 """ 404 with self._lock: 405 # Flush to file first (atomic) 406 new_mtime = self._flush_to_file(data) 407 # Update cache 408 self._cache = data 409 self._cache_mtime = new_mtime 410 return new_mtime 411 412 def invalidate(self) -> None: 413 """Force cache reload on next read.""" 414 with self._lock: 415 self._cache_mtime = 0.0 416 417 def exists(self) -> bool: 418 """Check if data file exists.""" 419 return self.data_path.exists() 420 421 def get_mtime(self) -> float: 422 """Get current file mtime (for change detection by clients).""" 423 return self._get_file_mtime() 424 425 426 # ============================================================================= 427 # Versioned Backup Manager 428 # ============================================================================= 429 430 431 class BackupManager: 432 """ 433 Manages versioned backups with tiered retention policy. 434 435 Backup creation is throttled to at most 1 per hour. 

    def _ensure_backup_dir(self) -> None:
        """Ensure backup directory exists."""
        self.backup_dir.mkdir(parents=True, exist_ok=True)

    def _generate_filename(self, timestamp: Optional[datetime] = None) -> str:
        """Generate backup filename with timestamp."""
        if timestamp is None:
            timestamp = datetime.now(timezone.utc)
        # Format: workspaces_2026-01-22T14-30-00.json (colons replaced with dashes for filename safety)
        ts_str = timestamp.strftime("%Y-%m-%dT%H-%M-%S")
        return f"workspaces_{ts_str}.json"

    def _parse_filename_timestamp(self, filename: str) -> Optional[datetime]:
        """Parse timestamp from backup filename."""
        match = self.FILENAME_PATTERN.match(filename)
        if not match:
            return None
        try:
            ts_str = match.group(1)
            # Parse: 2026-01-22T14-30-00
            return datetime.strptime(ts_str, "%Y-%m-%dT%H-%M-%S").replace(tzinfo=timezone.utc)
        except ValueError:
            return None
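
    # Illustrative round trip (a sketch, not exercised by the service): the two
    # helpers above are inverses at whole-second precision. The directory and
    # timestamp are hypothetical example values.
    #
    #     bm = BackupManager(Path("/tmp/example-config"))
    #     ts = datetime(2026, 1, 22, 14, 30, 0, tzinfo=timezone.utc)
    #     name = bm._generate_filename(ts)  # "workspaces_2026-01-22T14-30-00.json"
    #     assert bm._parse_filename_timestamp(name) == ts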
485 """ 486 self._ensure_backup_dir() 487 488 now = datetime.now(timezone.utc) 489 490 # Throttle: skip if a backup was created within the last hour 491 if self._last_backup_time and (now - self._last_backup_time).total_seconds() < 3600: 492 logger.debug("Backup: Skipped — last backup was %ds ago (throttle: 1/hour)", 493 int((now - self._last_backup_time).total_seconds())) 494 return None 495 496 filename = self._generate_filename(now) 497 backup_path = self.backup_dir / filename 498 499 try: 500 backup_path.write_text( 501 json.dumps(data, indent=2, ensure_ascii=False), 502 encoding="utf-8" 503 ) 504 self._last_backup_time = now 505 logger.info("Backup: Created %s (%s)", filename, _summarize_workspaces(data)) 506 return filename 507 except (IOError, OSError) as e: 508 logger.error("Backup: Failed to create backup: %s", e) 509 return None 510 511 def list_backups(self) -> List[BackupInfo]: 512 """List all backups with metadata.""" 513 self._ensure_backup_dir() 514 515 backups = [] 516 now = datetime.now(timezone.utc) 517 518 for path in self.backup_dir.glob("workspaces_*.json"): 519 timestamp = self._parse_filename_timestamp(path.name) 520 if timestamp is None: 521 continue 522 523 try: 524 stat = path.stat() 525 age_days = (now - timestamp).total_seconds() / 86400 526 527 # Try to count workspaces (optional) 528 workspace_count = None 529 try: 530 data = json.loads(path.read_text(encoding="utf-8")) 531 # Count workspaces in the 'browsers' structure 532 total = 0 533 if "browsers" in data: 534 for browser_data in data["browsers"].values(): 535 if isinstance(browser_data, dict) and "workspaces" in browser_data: 536 total += len(browser_data["workspaces"]) 537 workspace_count = total 538 except (json.JSONDecodeError, KeyError): 539 pass 540 541 backups.append(BackupInfo( 542 filename=path.name, 543 timestamp=timestamp.isoformat(), 544 timestamp_unix=timestamp.timestamp(), 545 size=stat.st_size, 546 age_days=age_days, 547 workspace_count=workspace_count, 548 )) 549 except (IOError, OSError): 550 continue 551 552 # Sort by timestamp (newest first) 553 backups.sort(key=lambda b: b.timestamp_unix, reverse=True) 554 return backups 555 556 def get_backup_contents(self, filename: str) -> Optional[Dict[str, Any]]: 557 """Get the contents of a specific backup.""" 558 # Validate filename format 559 if not self.FILENAME_PATTERN.match(filename): 560 return None 561 562 backup_path = self.backup_dir / filename 563 if not backup_path.exists(): 564 return None 565 566 try: 567 data = json.loads(backup_path.read_text(encoding="utf-8")) 568 return data 569 except (IOError, OSError, json.JSONDecodeError): 570 return None 571 572 def restore_backup(self, filename: str, config_path: Path) -> bool: 573 """ 574 Restore a specific backup to the main config file. 575 Returns True if successful. 576 """ 577 # Validate filename format 578 if not self.FILENAME_PATTERN.match(filename): 579 return False 580 581 backup_path = self.backup_dir / filename 582 if not backup_path.exists(): 583 return False 584 585 try: 586 shutil.copy2(backup_path, config_path) 587 return True 588 except (IOError, OSError) as e: 589 logger.error("Backup: Failed to restore backup: %s", e) 590 return False 591 592 def delete_backup(self, filename: str) -> bool: 593 """Delete a specific backup. 

    def delete_backup(self, filename: str) -> bool:
        """Delete a specific backup. Returns True if successful."""
        # Validate filename format
        if not self.FILENAME_PATTERN.match(filename):
            return False

        backup_path = self.backup_dir / filename
        if not backup_path.exists():
            return False

        try:
            backup_path.unlink()
            return True
        except (IOError, OSError) as e:
            logger.error("Backup: Failed to delete backup: %s", e)
            return False

    def prune_backups(self) -> tuple[int, int]:
        """
        Prune backups according to retention policy.
        Returns (deleted_count, remaining_count).
        """
        self._ensure_backup_dir()

        backups = self.list_backups()
        if not backups:
            return 0, 0

        now = datetime.now(timezone.utc)
        to_delete: List[str] = []
        to_keep: List[BackupInfo] = []

        # Group backups by day for tiered retention
        backups_by_day: Dict[str, List[BackupInfo]] = {}
        for backup in backups:
            day_key = backup.timestamp[:10]  # YYYY-MM-DD
            if day_key not in backups_by_day:
                backups_by_day[day_key] = []
            backups_by_day[day_key].append(backup)

        for backup in backups:
            age_days = backup.age_days
            timestamp = datetime.fromisoformat(backup.timestamp)
            day_key = backup.timestamp[:10]
            day_backups = backups_by_day[day_key]

            if age_days > BACKUP_RETENTION_DAYS:
                # Beyond 30 days: delete
                to_delete.append(backup.filename)
            elif age_days <= 1:
                # Last 24 hours: keep all
                to_keep.append(backup)
            elif age_days <= 7:
                # Days 2-7: keep 6 per day (4-hour windows: 0-4, 4-8, 8-12, 12-16, 16-20, 20-24)
                hour = timestamp.hour
                window = hour // 4  # 0..5

                # Find backups in the same 4-hour window on this day
                window_backups = [
                    b for b in day_backups
                    if datetime.fromisoformat(b.timestamp).hour // 4 == window
                ]

                keep = False
                if window_backups:
                    # Keep the latest backup in this window
                    best = max(window_backups, key=lambda b: b.timestamp_unix)
                    if backup.filename == best.filename:
                        keep = True

                if keep:
                    to_keep.append(backup)
                else:
                    to_delete.append(backup.filename)
            else:
                # Days 8-30: keep 2 per day (morning 6-18, evening 18-6)
                hour = timestamp.hour
                is_morning = 6 <= hour < 18

                # Find backups in the same half-day window on this day
                morning_backups = [b for b in day_backups if 6 <= datetime.fromisoformat(b.timestamp).hour < 18]
                evening_backups = [b for b in day_backups if not (6 <= datetime.fromisoformat(b.timestamp).hour < 18)]

                keep = False
                if is_morning and morning_backups:
                    best = max(morning_backups, key=lambda b: b.timestamp_unix)
                    if backup.filename == best.filename:
                        keep = True
                elif not is_morning and evening_backups:
                    best = max(evening_backups, key=lambda b: b.timestamp_unix)
                    if backup.filename == best.filename:
                        keep = True

                if keep:
                    to_keep.append(backup)
                else:
                    to_delete.append(backup.filename)

        # Delete the backups
        deleted_count = 0
        for filename in to_delete:
            if self.delete_backup(filename):
                deleted_count += 1

        return deleted_count, len(to_keep)

    def auto_prune_if_needed(self) -> None:
        """Prune backups automatically if enough time has passed since last prune."""
        now = time.time()
        if now - self._last_prune_time >= self._prune_interval:
            self._last_prune_time = now
            deleted, remaining = self.prune_backups()
            if deleted > 0:
                logger.info("Backup: Auto-pruned %d backups, %d remaining", deleted, remaining)


# Global backup manager instance (initialized with app_state)
backup_manager: Optional[BackupManager] = None


def get_backup_manager() -> BackupManager:
    """Get or create the backup manager instance."""
    global backup_manager
    if backup_manager is None:
        backup_manager = BackupManager(app_state.config_dir)
    return backup_manager
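

# Illustrative sketch of how prune_backups() buckets timestamps (not called by
# the service): days 2-7 use six 4-hour windows per day, days 8-30 use a
# morning (06:00-18:00) / evening bucket, and only the newest backup in each
# bucket survives.
def _example_retention_buckets() -> None:
    for hour in (0, 5, 11, 17, 23):
        window = hour // 4           # 0..5: the 4-hour bucket used for days 2-7
        is_morning = 6 <= hour < 18  # the half-day bucket used for days 8-30
        assert 0 <= window <= 5
        assert is_morning == (hour in (11, 17))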


# =============================================================================
# Application State
# =============================================================================


class AppState:
    """Application state container."""
    config_dir: Path = DEFAULT_CONFIG_DIR
    todoist_token: Optional[str] = None
    # Bind address; main() is expected to set these from CLI args (defaults otherwise)
    host: str = DEFAULT_HOST
    port: int = DEFAULT_PORT

    @property
    def config_path(self) -> Path:
        return self.config_dir / CONFIG_FILE

    @property
    def lock_path(self) -> Path:
        return self.config_dir / LOCK_FILE

    @property
    def backup_path(self) -> Path:
        return self.config_dir / (CONFIG_FILE + BACKUP_SUFFIX)

    @property
    def credentials_path(self) -> Path:
        return self.config_dir / CREDENTIALS_FILE

    @property
    def credentials_lock_path(self) -> Path:
        return self.config_dir / CREDENTIALS_LOCK_FILE

    @property
    def credentials_backup_path(self) -> Path:
        return self.config_dir / (CREDENTIALS_FILE + BACKUP_SUFFIX)


app_state = AppState()

# Global CacheIO instance (initialized in lifespan)
cache_io: Optional[CacheIO] = None

# Global CacheIO instance for credentials
credentials_cache_io: Optional[CacheIO] = None


def get_cache_io() -> CacheIO:
    """Get the CacheIO instance for workspaces."""
    global cache_io
    if cache_io is None:
        cache_io = CacheIO(app_state.config_path, app_state.lock_path)
    return cache_io


def get_credentials_cache_io() -> CacheIO:
    """Get the CacheIO instance for credentials."""
    global credentials_cache_io
    if credentials_cache_io is None:
        credentials_cache_io = CacheIO(app_state.credentials_path, app_state.credentials_lock_path)
    return credentials_cache_io


# =============================================================================
# Todoist Integration
# =============================================================================


class TodoistService:
    """Manages Todoist API interactions via the official async SDK."""

    def __init__(self, token: str):
        self.token = token
        self._api = TodoistAPIAsync(token)
        self._master_project_id: Optional[str] = None

    async def verify_connection(self) -> dict:
        """Validate token and find/create the Mnemonic master project."""
        projects = []
        async for batch in await self._api.get_projects():
            projects.extend(batch)
        master = next(
            (p for p in projects if p.name == TODOIST_MASTER_PROJECT_NAME and not p.parent_id),
            None,
        )
        if not master:
            master = await self._api.add_project(name=TODOIST_MASTER_PROJECT_NAME)
        self._master_project_id = master.id
        return {
            "connected": True,
            "master_project_id": master.id,
            "master_project_name": master.name,
        }

    async def create_project(self, name: str, parent_id: Optional[str] = None) -> dict:
        """Create a project under the master (or given parent) project."""
        if not self._master_project_id:
            await self.verify_connection()
        project = await self._api.add_project(
            name=name,
            parent_id=parent_id or self._master_project_id,
        )
        return {
            "id": project.id,
            "name": project.name,
            "parent_id": project.parent_id,
        }

    async def add_task(
        self,
        content: str,
        project_id: str,
        priority: int = 1,
        due_string: Optional[str] = None,
    ) -> dict:
        """Add a task to a Todoist project."""
        kwargs: Dict[str, Any] = {
            "content": content,
            "project_id": project_id,
            "priority": priority,
        }
        if due_string:
            kwargs["due_string"] = due_string
        task = await self._api.add_task(**kwargs)
        return {
            "id": task.id,
            "content": task.content,
            "project_id": task.project_id,
            "priority": task.priority,
            "url": f"https://todoist.com/showTask?id={task.id}",
        }

    async def get_tasks(self, project_id: str) -> list:
        """Get active tasks for a project."""
        tasks = []
        async for batch in await self._api.get_tasks(project_id=project_id):
            tasks.extend(batch)
        return [
            {
                "id": t.id,
                "content": t.content,
                "priority": t.priority,
            }
            for t in tasks
        ]
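

# A hedged usage sketch for TodoistService (not called by the service). It
# assumes todoist-api-python is installed and a valid token is available;
# "YOUR_TODOIST_TOKEN" and the project/task names are placeholder example values.
async def _example_todoist_usage() -> None:
    svc = TodoistService("YOUR_TODOIST_TOKEN")  # placeholder token
    await svc.verify_connection()  # finds or creates the "Mnemonic" master project
    project = await svc.create_project("Research")
    await svc.add_task("Read the docs", project_id=project["id"], priority=2)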


# Singleton
_todoist_service: Optional[TodoistService] = None


def get_todoist_service() -> Optional[TodoistService]:
    """Get or create the TodoistService singleton. Returns None if no token is configured."""
    global _todoist_service
    if _todoist_service is None and app_state.todoist_token:
        if not TODOIST_SDK_AVAILABLE:
            logger.warning("todoist-api-python not installed — Todoist features disabled")
            return None
        _todoist_service = TodoistService(app_state.todoist_token)
    return _todoist_service


# =============================================================================
# FastAPI Application
# =============================================================================


@asynccontextmanager
async def lifespan(app: FastAPI):
    """Application lifespan handler."""
    # Startup: ensure config directory exists
    app_state.config_dir.mkdir(parents=True, exist_ok=True)
    setup_file_logging(app_state.config_dir)
    # Log the configured bind address (defaults unless main() overrides app_state.host/port)
    logger.info("Started on http://%s:%s", app_state.host, app_state.port)
    logger.info("Config directory: %s", app_state.config_dir)
    yield
    # Shutdown
    logger.info("Shutting down...")


app = FastAPI(
    title="Mnemonic Workspaces Bridge",
    description="REST API for syncing browser workspaces via Syncthing",
    version=API_VERSION,
    lifespan=lifespan,
)

# CORS middleware - allow browser extensions to connect
app.add_middleware(
    CORSMiddleware,
    allow_origins=["*"],  # Browser extensions use unique origins
    allow_credentials=True,
    allow_methods=["*"],
    allow_headers=["*"],
)

# Jinja2 templates for the dashboard (lazy-initialized when the dashboard is enabled)
_templates_dir = Path(__file__).parent / "templates"
templates: Optional[Jinja2Templates] = None

# Dashboard is disabled by default; enabled via --dashboard-host at startup
_dashboard_enabled = False


def _init_dashboard_templates() -> None:
    """Initialize Jinja2 templates when the dashboard is enabled."""
    global templates
    _templates_dir.mkdir(parents=True, exist_ok=True)
    templates = Jinja2Templates(directory=str(_templates_dir))

    def _domain_filter(url: str) -> str:
        try:
            parsed = urlparse(url)
            return parsed.hostname or url
        except Exception:
            return url

    templates.env.filters["domain"] = _domain_filter


# =============================================================================
# API Endpoints
# =============================================================================


@app.get("/", include_in_schema=False)
async def root_redirect():
    return RedirectResponse(url="/dashboard")


@app.get("/health", response_model=HealthResponse, tags=["Health"])
async def health_check():
    """
    Health check endpoint.

    Use this to verify the service is running and check version compatibility.
    The client should verify that its version meets min_client_version.
    """
    return HealthResponse(
        status="ok",
        version=VersionInfo(
            api_version=API_VERSION,
            schema_version=SCHEMA_VERSION,
            min_client_version=MIN_CLIENT_VERSION,
        ),
        config_dir=str(app_state.config_dir),
        timestamp=datetime.now(timezone.utc).isoformat(),
    )
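

# A minimal client-side sketch (stdlib-only, not called by the service): how a
# script or extension host might verify the bridge before syncing. The endpoint
# and field names come from HealthResponse above; the host and port are the
# documented defaults.
def _example_health_check() -> None:
    import urllib.request
    with urllib.request.urlopen("http://127.0.0.1:8765/health", timeout=5) as resp:
        payload = json.load(resp)
    assert payload["status"] == "ok"
    # Clients are expected to compare their own version against this value.
    print(payload["version"]["min_client_version"])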


@app.get("/dashboard", include_in_schema=False)
async def dashboard(request: Request):
    """
    Read-only web dashboard for browsing workspaces.
    Renders all workspace data server-side for instant load.
    Only available when started with --dashboard-host.
    """
    if not _dashboard_enabled or templates is None:
        raise HTTPException(
            status_code=404,
            detail="Dashboard is disabled. Start the service with --dashboard-host to enable it.",
        )

    cio = get_cache_io()
    data, _ = cio.read()

    # Pre-categorize workspaces in Python (Jinja2 scoping makes this painful in templates)
    browser_sections = {}  # browser_key -> { transcendent, parents, standalone }
    total_workspaces = 0
    total_tabs = 0

    if data:
        for browser_key, browser_data in data.get("browsers", {}).items():
            if not isinstance(browser_data, dict):
                continue
            workspaces = browser_data.get("workspaces", [])

            # Build ID lookup for this browser
            ws_by_id = {ws.get("id", ""): ws for ws in workspaces if ws.get("id")}

            # Track which IDs are claimed as children
            child_ids = set()
            for ws in workspaces:
                for cid in ws.get("children", []):
                    child_ids.add(cid)
                if ws.get("parentId"):
                    child_ids.add(ws.get("id", ""))

            transcendent = []
            parents = []
            standalone = []

            for ws in workspaces:
                ws_id = ws.get("id", "")
                total_workspaces += 1
                tabs = ws.get("tabs", [])
                if isinstance(tabs, list):
                    total_tabs += len(tabs)

                is_trans = ws.get("isTranscendent", False) or ws.get("transcendent", False)
                has_parent = bool(ws.get("parentId"))
                has_children = bool(ws.get("children")) and len(ws.get("children", [])) > 0

                if is_trans:
                    transcendent.append(ws)
                elif has_parent:
                    pass  # rendered under parent
                elif has_children:
                    # Resolve children objects and count their tabs
                    resolved_children = []
                    child_tab_total = 0
                    for cid in ws.get("children", []):
                        child_ws = ws_by_id.get(cid)
                        if child_ws:
                            resolved_children.append(child_ws)
                            ctabs = child_ws.get("tabs", [])
                            if isinstance(ctabs, list):
                                child_tab_total += len(ctabs)
                    # Also find children that reference this workspace via parentId
                    for other in workspaces:
                        if other.get("parentId") == ws_id and other.get("id") not in {c.get("id") for c in resolved_children}:
                            resolved_children.append(other)
                            ctabs = other.get("tabs", [])
                            if isinstance(ctabs, list):
                                child_tab_total += len(ctabs)
                    parents.append({
                        "ws": ws,
                        "children": resolved_children,
                        "child_tab_total": child_tab_total,
                    })
                elif ws_id not in child_ids:
                    standalone.append(ws)

            browser_sections[browser_key] = {
                "workspaces": workspaces,
                "transcendent": transcendent,
                "parents": parents,
                "standalone": standalone,
            }

    last_modified = None
    if data and "metadata" in data:
        last_modified = data["metadata"].get("lastModified")

    # Todoist integration status
    todoist_configured = bool(app_state.todoist_token and TODOIST_SDK_AVAILABLE)
    todoist_status = None
    if todoist_configured:
        svc = get_todoist_service()
        if svc:
            try:
                todoist_status = await svc.verify_connection()
            except Exception as e:
                logger.warning("Todoist connection check failed: %s", e)
                todoist_status = {"connected": False, "error": str(e)}

    # Build simplified workspace list per browser for the tab transfer picker
    browser_ws_json: Dict[str, list] = {}
    for bk, section in browser_sections.items():
        browser_ws_json[bk] = [
            {
                "id": ws.get("id", ""),
                "name": ws.get("name", ""),
                "tab_count": len(ws.get("tabs", [])) if isinstance(ws.get("tabs"), list) else 0,
            }
            for ws in section["workspaces"]
        ]

    return templates.TemplateResponse("dashboard.html", {
        "request": request,
        "browser_sections": browser_sections,
        "total_workspaces": total_workspaces,
        "total_tabs": total_tabs,
        "last_modified": last_modified,
        "todoist_configured": todoist_configured,
        "todoist_status": todoist_status,
        "browser_ws_json": json.dumps(browser_ws_json),
    })


@app.get("/info", response_model=FileInfoResponse, tags=["Workspaces"])
async def get_file_info():
    """
    Get workspace file metadata.

    Returns file existence, modification time, and size without reading contents.
    Useful for checking if external changes have occurred.
    """
    config_path = app_state.config_path

    if not config_path.exists():
        return FileInfoResponse(
            exists=False,
            path=str(config_path),
        )

    stat = config_path.stat()
    return FileInfoResponse(
        exists=True,
        path=str(config_path),
        mtime=stat.st_mtime,
        mtime_iso=datetime.fromtimestamp(stat.st_mtime, tz=timezone.utc).isoformat(),
        size=stat.st_size,
    )
1130 """ 1131 cio = get_cache_io() 1132 1133 try: 1134 data, mtime = cio.read() 1135 1136 if data is None: 1137 return ReadResponse(exists=False, data=None) 1138 1139 return ReadResponse( 1140 exists=True, 1141 data=data, 1142 mtime=mtime, 1143 ) 1144 except TimeoutError: 1145 raise HTTPException( 1146 status_code=503, 1147 detail=ErrorResponse( 1148 error="Could not acquire lock - file is busy", 1149 code="LOCK_TIMEOUT", 1150 ).model_dump(), 1151 ) 1152 except json.JSONDecodeError as e: 1153 raise HTTPException( 1154 status_code=400, 1155 detail=ErrorResponse( 1156 error=f"Invalid JSON in config file: {e}", 1157 code="INVALID_JSON", 1158 ).model_dump(), 1159 ) 1160 except IOError as e: 1161 raise HTTPException( 1162 status_code=500, 1163 detail=ErrorResponse( 1164 error=f"Failed to read config file: {e}", 1165 code="READ_ERROR", 1166 ).model_dump(), 1167 ) 1168 1169 1170 @app.post("/workspaces", response_model=WriteResponse, tags=["Workspaces"]) 1171 async def write_workspaces(request: WriteRequest): 1172 """ 1173 Write workspace data. 1174 1175 Updates the in-memory cache and atomically flushes to file. 1176 Automatically creates both a quick .bak backup and a versioned backup. 1177 """ 1178 config_path = app_state.config_path 1179 backup_path = app_state.backup_path 1180 data = request.data 1181 cio = get_cache_io() 1182 1183 try: 1184 # Create backups of existing data before writing 1185 existing_data, _ = cio.read() 1186 if existing_data is not None: 1187 # Keep existing .bak for quick recovery 1188 try: 1189 shutil.copy2(config_path, backup_path) 1190 except (IOError, OSError): 1191 pass # Best effort 1192 1193 # Create versioned backup 1194 try: 1195 bm = get_backup_manager() 1196 bm.create_backup(existing_data) 1197 # Auto-prune old backups (throttled to max once per hour) 1198 bm.auto_prune_if_needed() 1199 except Exception as e: 1200 # Log but don't fail - versioned backup is best-effort 1201 logger.warning("Backup: Failed to create versioned backup: %s", e) 1202 1203 # Update metadata timestamp 1204 if "metadata" not in data: 1205 data["metadata"] = {} 1206 data["metadata"]["lastModified"] = datetime.now(timezone.utc).isoformat() 1207 1208 # Ensure schema version 1209 if "version" not in data: 1210 data["version"] = SCHEMA_VERSION 1211 1212 # Write to cache and flush to file atomically 1213 new_mtime = cio.write(data) 1214 1215 logger.info("Write: %s", _summarize_workspaces(data)) 1216 1217 return WriteResponse( 1218 mtime=new_mtime, 1219 path=str(config_path), 1220 ) 1221 except TimeoutError: 1222 raise HTTPException( 1223 status_code=503, 1224 detail=ErrorResponse( 1225 error="Could not acquire write lock - file is busy", 1226 code="LOCK_TIMEOUT", 1227 ).model_dump(), 1228 ) 1229 except (IOError, OSError) as e: 1230 raise HTTPException( 1231 status_code=500, 1232 detail=ErrorResponse( 1233 error=f"Failed to write config file: {e}", 1234 code="WRITE_ERROR", 1235 ).model_dump(), 1236 ) 1237 1238 1239 @app.delete("/workspaces", response_model=DeleteResponse, tags=["Workspaces"]) 1240 async def delete_workspaces(): 1241 """ 1242 Delete workspace data. 1243 1244 Removes the workspace configuration file. A backup is created first. 1245 Invalidates the in-memory cache. 
1246 """ 1247 config_path = app_state.config_path 1248 backup_path = app_state.backup_path 1249 cio = get_cache_io() 1250 lock = FileLock(app_state.lock_path) 1251 1252 try: 1253 with lock: 1254 if config_path.exists(): 1255 shutil.copy2(config_path, backup_path) 1256 config_path.unlink() 1257 # Invalidate cache 1258 cio.invalidate() 1259 return DeleteResponse(message="File deleted") 1260 except TimeoutError: 1261 raise HTTPException( 1262 status_code=503, 1263 detail=ErrorResponse( 1264 error="Could not acquire lock for delete - file is busy", 1265 code="LOCK_TIMEOUT", 1266 ).model_dump(), 1267 ) 1268 except (IOError, OSError) as e: 1269 raise HTTPException( 1270 status_code=500, 1271 detail=ErrorResponse( 1272 error=f"Failed to delete config file: {e}", 1273 code="DELETE_ERROR", 1274 ).model_dump(), 1275 ) 1276 1277 1278 @app.post("/workspaces/restore", response_model=RestoreResponse, tags=["Workspaces"]) 1279 async def restore_backup(): 1280 """ 1281 Restore from backup. 1282 1283 Restores the workspace configuration from the most recent backup. 1284 Invalidates the in-memory cache to force reload. 1285 """ 1286 config_path = app_state.config_path 1287 backup_path = app_state.backup_path 1288 cio = get_cache_io() 1289 lock = FileLock(app_state.lock_path) 1290 1291 if not backup_path.exists(): 1292 raise HTTPException( 1293 status_code=404, 1294 detail=ErrorResponse( 1295 error="No backup file exists", 1296 code="NO_BACKUP", 1297 ).model_dump(), 1298 ) 1299 1300 try: 1301 with lock: 1302 shutil.copy2(backup_path, config_path) 1303 # Invalidate cache to force reload 1304 cio.invalidate() 1305 return RestoreResponse( 1306 mtime=config_path.stat().st_mtime, 1307 message="Backup restored successfully", 1308 ) 1309 except TimeoutError: 1310 raise HTTPException( 1311 status_code=503, 1312 detail=ErrorResponse( 1313 error="Could not acquire lock for restore - file is busy", 1314 code="LOCK_TIMEOUT", 1315 ).model_dump(), 1316 ) 1317 except (IOError, OSError) as e: 1318 raise HTTPException( 1319 status_code=500, 1320 detail=ErrorResponse( 1321 error=f"Failed to restore backup: {e}", 1322 code="RESTORE_ERROR", 1323 ).model_dump(), 1324 ) 1325 1326 1327 # ============================================================================= 1328 # Versioned Backup Endpoints 1329 # ============================================================================= 1330 1331 1332 @app.get("/backups", response_model=BackupListResponse, tags=["Backups"]) 1333 async def list_backups(): 1334 """ 1335 List all versioned backups. 1336 1337 Returns a list of all backups with metadata including timestamp, size, 1338 and workspace count. Backups are sorted by timestamp (newest first). 1339 """ 1340 bm = get_backup_manager() 1341 backups = bm.list_backups() 1342 1343 total_size = sum(b.size for b in backups) 1344 1345 return BackupListResponse( 1346 backups=backups, 1347 total_count=len(backups), 1348 total_size=total_size, 1349 ) 1350 1351 1352 @app.get("/backups/search", tags=["Backups"]) 1353 async def search_backups(workspace: str = ""): 1354 """ 1355 Search for a workspace across all backups. 1356 1357 Returns matches sorted newest-first with tab counts, enabling 1358 the user to find when a workspace's tabs went to zero. 
1359 """ 1360 if not workspace or len(workspace.strip()) < 1: 1361 return {"success": True, "results": [], "query": workspace} 1362 1363 bm = get_backup_manager() 1364 bm._ensure_backup_dir() 1365 1366 query = workspace.strip().lower() 1367 results: List[Dict[str, Any]] = [] 1368 1369 for path in sorted(bm.backup_dir.glob("workspaces_*.json"), reverse=True): 1370 timestamp = bm._parse_filename_timestamp(path.name) 1371 if timestamp is None: 1372 continue 1373 1374 try: 1375 data = json.loads(path.read_text(encoding="utf-8")) 1376 browsers = data.get("browsers", {}) 1377 for browser_key, browser_data in browsers.items(): 1378 if not isinstance(browser_data, dict): 1379 continue 1380 for ws in browser_data.get("workspaces", []): 1381 ws_name = ws.get("name", "") 1382 if query in ws_name.lower(): 1383 tabs = ws.get("tabs") 1384 tab_count = len(tabs) if isinstance(tabs, list) else -1 1385 results.append({ 1386 "filename": path.name, 1387 "timestamp": timestamp.isoformat(), 1388 "browser": browser_key, 1389 "workspace_name": ws_name, 1390 "workspace_id": ws.get("id", ""), 1391 "tab_count": tab_count, 1392 }) 1393 except (json.JSONDecodeError, IOError, OSError): 1394 continue 1395 1396 logger.info("Search: query=%r, results=%d", workspace, len(results)) 1397 return {"success": True, "results": results, "query": workspace} 1398 1399 1400 class SelectiveRestoreRequest(BaseModel): 1401 """Request for selective workspace restore.""" 1402 filename: str = Field(..., description="Backup filename to restore from") 1403 workspace_ids: List[str] = Field(..., description="Workspace IDs to restore selectively") 1404 1405 1406 @app.post("/backups/restore-selective", tags=["Backups"]) 1407 async def restore_selective(request: SelectiveRestoreRequest): 1408 """ 1409 Selectively restore specific workspaces from a backup. 1410 1411 Only replaces the specified workspaces in the current config 1412 with their versions from the backup. Other workspaces are unchanged. 1413 A backup of the current state is created first. 
1414 """ 1415 bm = get_backup_manager() 1416 cio = get_cache_io() 1417 lock = FileLock(app_state.lock_path) 1418 1419 # Validate filename 1420 if not bm.FILENAME_PATTERN.match(request.filename): 1421 raise HTTPException(status_code=400, detail=ErrorResponse( 1422 error="Invalid backup filename format", code="INVALID_FILENAME" 1423 ).model_dump()) 1424 1425 backup_data = bm.get_backup_contents(request.filename) 1426 if backup_data is None: 1427 raise HTTPException(status_code=404, detail=ErrorResponse( 1428 error="Backup not found", code="BACKUP_NOT_FOUND" 1429 ).model_dump()) 1430 1431 try: 1432 with lock: 1433 # Read current data 1434 current_data, _ = cio.read() 1435 if current_data is None: 1436 raise HTTPException(status_code=404, detail=ErrorResponse( 1437 error="No current config to merge into", code="NO_CONFIG" 1438 ).model_dump()) 1439 1440 # Create backup of current state first 1441 bm.create_backup(current_data) 1442 1443 # Build lookup of backup workspaces by ID 1444 backup_ws_by_id: Dict[str, Any] = {} 1445 for browser_data in backup_data.get("browsers", {}).values(): 1446 if not isinstance(browser_data, dict): 1447 continue 1448 for ws in browser_data.get("workspaces", []): 1449 ws_id = ws.get("id") 1450 if ws_id: 1451 backup_ws_by_id[ws_id] = ws 1452 1453 # Replace matching workspaces in current config 1454 restored_count = 0 1455 ids_to_restore = set(request.workspace_ids) 1456 1457 for browser_key, browser_data in current_data.get("browsers", {}).items(): 1458 if not isinstance(browser_data, dict): 1459 continue 1460 workspaces = browser_data.get("workspaces", []) 1461 for i, ws in enumerate(workspaces): 1462 ws_id = ws.get("id") 1463 if ws_id in ids_to_restore and ws_id in backup_ws_by_id: 1464 workspaces[i] = backup_ws_by_id[ws_id] 1465 restored_count += 1 1466 ids_to_restore.discard(ws_id) 1467 1468 # Write merged result 1469 new_mtime = cio.write(current_data) 1470 logger.info( 1471 "Selective restore: %d workspaces from %s", 1472 restored_count, request.filename 1473 ) 1474 1475 return { 1476 "success": True, 1477 "restored_count": restored_count, 1478 "mtime": new_mtime, 1479 "message": f"Restored {restored_count} workspaces from {request.filename}", 1480 } 1481 except TimeoutError: 1482 raise HTTPException(status_code=503, detail=ErrorResponse( 1483 error="Could not acquire lock - file is busy", code="LOCK_TIMEOUT" 1484 ).model_dump()) 1485 except (IOError, OSError) as e: 1486 raise HTTPException(status_code=500, detail=ErrorResponse( 1487 error=f"Failed to restore: {e}", code="RESTORE_ERROR" 1488 ).model_dump()) 1489 1490 1491 @app.get("/backups/{filename}", response_model=BackupPreviewResponse, tags=["Backups"]) 1492 async def get_backup(filename: str): 1493 """ 1494 Get the contents of a specific backup for preview. 1495 1496 Returns the full backup data for displaying in a preview UI. 
1497 """ 1498 bm = get_backup_manager() 1499 1500 # Validate filename 1501 if not bm.FILENAME_PATTERN.match(filename): 1502 raise HTTPException( 1503 status_code=400, 1504 detail=ErrorResponse( 1505 error="Invalid backup filename format", 1506 code="INVALID_FILENAME", 1507 ).model_dump(), 1508 ) 1509 1510 data = bm.get_backup_contents(filename) 1511 if data is None: 1512 raise HTTPException( 1513 status_code=404, 1514 detail=ErrorResponse( 1515 error="Backup not found", 1516 code="BACKUP_NOT_FOUND", 1517 ).model_dump(), 1518 ) 1519 1520 # Parse timestamp from filename 1521 timestamp = bm._parse_filename_timestamp(filename) 1522 timestamp_str = timestamp.isoformat() if timestamp else "" 1523 1524 return BackupPreviewResponse( 1525 filename=filename, 1526 timestamp=timestamp_str, 1527 data=data, 1528 ) 1529 1530 1531 @app.post("/backups/restore", response_model=RestoreFromBackupResponse, tags=["Backups"]) 1532 async def restore_from_versioned_backup(request: RestoreFromBackupRequest): 1533 """ 1534 Restore from a specific versioned backup. 1535 1536 Restores the workspace configuration from the specified backup file. 1537 The current configuration is backed up first before restoring. 1538 Invalidates the in-memory cache to force reload. 1539 """ 1540 bm = get_backup_manager() 1541 config_path = app_state.config_path 1542 cio = get_cache_io() 1543 lock = FileLock(app_state.lock_path) 1544 1545 # Validate filename 1546 if not bm.FILENAME_PATTERN.match(request.filename): 1547 raise HTTPException( 1548 status_code=400, 1549 detail=ErrorResponse( 1550 error="Invalid backup filename format", 1551 code="INVALID_FILENAME", 1552 ).model_dump(), 1553 ) 1554 1555 # Check backup exists 1556 backup_contents = bm.get_backup_contents(request.filename) 1557 if backup_contents is None: 1558 raise HTTPException( 1559 status_code=404, 1560 detail=ErrorResponse( 1561 error="Backup not found", 1562 code="BACKUP_NOT_FOUND", 1563 ).model_dump(), 1564 ) 1565 1566 try: 1567 with lock: 1568 # Create a backup of current state before restoring 1569 if config_path.exists(): 1570 current_data = json.loads(config_path.read_text(encoding="utf-8")) 1571 bm.create_backup(current_data) 1572 1573 # Restore the backup 1574 if not bm.restore_backup(request.filename, config_path): 1575 raise HTTPException( 1576 status_code=500, 1577 detail=ErrorResponse( 1578 error="Failed to restore backup", 1579 code="RESTORE_ERROR", 1580 ).model_dump(), 1581 ) 1582 1583 # Invalidate cache to force reload 1584 cio.invalidate() 1585 1586 logger.info("Restore: Restored from %s (%s)", request.filename, _summarize_workspaces(backup_contents)) 1587 1588 return RestoreFromBackupResponse( 1589 restored_from=request.filename, 1590 mtime=config_path.stat().st_mtime, 1591 message=f"Successfully restored from {request.filename}", 1592 ) 1593 except TimeoutError: 1594 raise HTTPException( 1595 status_code=503, 1596 detail=ErrorResponse( 1597 error="Could not acquire lock for restore - file is busy", 1598 code="LOCK_TIMEOUT", 1599 ).model_dump(), 1600 ) 1601 except (IOError, OSError) as e: 1602 raise HTTPException( 1603 status_code=500, 1604 detail=ErrorResponse( 1605 error=f"Failed to restore backup: {e}", 1606 code="RESTORE_ERROR", 1607 ).model_dump(), 1608 ) 1609 1610 1611 @app.delete("/backups/{filename}", response_model=DeleteResponse, tags=["Backups"]) 1612 async def delete_backup_file(filename: str): 1613 """ 1614 Delete a specific backup. 1615 1616 Permanently removes the specified backup file. 
1617 """ 1618 bm = get_backup_manager() 1619 1620 # Validate filename 1621 if not bm.FILENAME_PATTERN.match(filename): 1622 raise HTTPException( 1623 status_code=400, 1624 detail=ErrorResponse( 1625 error="Invalid backup filename format", 1626 code="INVALID_FILENAME", 1627 ).model_dump(), 1628 ) 1629 1630 if not bm.delete_backup(filename): 1631 raise HTTPException( 1632 status_code=404, 1633 detail=ErrorResponse( 1634 error="Backup not found or could not be deleted", 1635 code="DELETE_FAILED", 1636 ).model_dump(), 1637 ) 1638 1639 return DeleteResponse(message=f"Backup {filename} deleted") 1640 1641 1642 @app.post("/backups/prune", response_model=PruneBackupsResponse, tags=["Backups"]) 1643 async def prune_backups(): 1644 """ 1645 Manually trigger backup retention cleanup. 1646 1647 Prunes backups according to the retention policy: 1648 - Last 24 hours: Keep all (~24/day max, creation throttled to 1/hour) 1649 - Days 2-7: Keep 6 per day (4-hour windows) 1650 - Days 8-30: Keep 2 per day (morning 6-18, evening 18-6) 1651 - Beyond 30 days: Delete 1652 """ 1653 bm = get_backup_manager() 1654 deleted, remaining = bm.prune_backups() 1655 1656 return PruneBackupsResponse( 1657 deleted_count=deleted, 1658 remaining_count=remaining, 1659 message=f"Pruned {deleted} backups, {remaining} remaining", 1660 ) 1661 1662 1663 # ============================================================================= 1664 # Encrypted Credentials Endpoints 1665 # ============================================================================= 1666 1667 1668 @app.get("/credentials/info", response_model=FileInfoResponse, tags=["Credentials"]) 1669 async def get_credentials_info(): 1670 """ 1671 Get encrypted credentials file metadata. 1672 1673 Returns file existence, modification time, and size without reading contents. 1674 Useful for checking if credentials exist on a new device. 1675 """ 1676 credentials_path = app_state.credentials_path 1677 1678 if not credentials_path.exists(): 1679 return FileInfoResponse( 1680 exists=False, 1681 path=str(credentials_path), 1682 ) 1683 1684 stat = credentials_path.stat() 1685 return FileInfoResponse( 1686 exists=True, 1687 path=str(credentials_path), 1688 mtime=stat.st_mtime, 1689 mtime_iso=datetime.fromtimestamp(stat.st_mtime, tz=timezone.utc).isoformat(), 1690 size=stat.st_size, 1691 ) 1692 1693 1694 @app.get("/credentials", response_model=ReadResponse, tags=["Credentials"]) 1695 async def read_credentials(): 1696 """ 1697 Read encrypted credentials file. 1698 1699 Returns the current encrypted credentials from the in-memory cache. 1700 The credentials are encrypted - the daemon never sees plaintext secrets. 1701 Automatically detects and reloads if file was modified externally (Syncthing). 
1702 """ 1703 cio = get_credentials_cache_io() 1704 1705 try: 1706 data, mtime = cio.read() 1707 1708 if data is None: 1709 return ReadResponse(exists=False, data=None) 1710 1711 return ReadResponse( 1712 exists=True, 1713 data=data, 1714 mtime=mtime, 1715 ) 1716 except TimeoutError: 1717 raise HTTPException( 1718 status_code=503, 1719 detail=ErrorResponse( 1720 error="Could not acquire lock - file is busy", 1721 code="LOCK_TIMEOUT", 1722 ).model_dump(), 1723 ) 1724 except json.JSONDecodeError as e: 1725 raise HTTPException( 1726 status_code=400, 1727 detail=ErrorResponse( 1728 error=f"Invalid JSON in credentials file: {e}", 1729 code="INVALID_JSON", 1730 ).model_dump(), 1731 ) 1732 except IOError as e: 1733 raise HTTPException( 1734 status_code=500, 1735 detail=ErrorResponse( 1736 error=f"Failed to read credentials file: {e}", 1737 code="READ_ERROR", 1738 ).model_dump(), 1739 ) 1740 1741 1742 @app.post("/credentials", response_model=WriteResponse, tags=["Credentials"]) 1743 async def write_credentials(request: WriteRequest): 1744 """ 1745 Write encrypted credentials file. 1746 1747 Updates the in-memory cache and atomically flushes to file. 1748 A backup is created before writing. 1749 The credentials are encrypted - the daemon never sees plaintext secrets. 1750 """ 1751 credentials_path = app_state.credentials_path 1752 backup_path = app_state.credentials_backup_path 1753 data = request.data 1754 cio = get_credentials_cache_io() 1755 1756 try: 1757 # Create backup of existing data before writing 1758 existing_data, _ = cio.read() 1759 if existing_data is not None: 1760 try: 1761 shutil.copy2(credentials_path, backup_path) 1762 except (IOError, OSError): 1763 pass # Best effort 1764 1765 # Update lastModified timestamp 1766 if "lastModified" not in data: 1767 data["lastModified"] = datetime.now(timezone.utc).isoformat() 1768 1769 # Write to cache and flush to file atomically 1770 new_mtime = cio.write(data) 1771 1772 return WriteResponse( 1773 mtime=new_mtime, 1774 path=str(credentials_path), 1775 ) 1776 except TimeoutError: 1777 raise HTTPException( 1778 status_code=503, 1779 detail=ErrorResponse( 1780 error="Could not acquire write lock - file is busy", 1781 code="LOCK_TIMEOUT", 1782 ).model_dump(), 1783 ) 1784 except (IOError, OSError) as e: 1785 raise HTTPException( 1786 status_code=500, 1787 detail=ErrorResponse( 1788 error=f"Failed to write credentials file: {e}", 1789 code="WRITE_ERROR", 1790 ).model_dump(), 1791 ) 1792 1793 1794 # ============================================================================= 1795 # Todoist Endpoints 1796 # ============================================================================= 1797 1798 1799 class TodoistCreateProjectRequest(BaseModel): 1800 workspace_id: str 1801 workspace_name: str 1802 browser_key: str 1803 parent_todoist_project_id: Optional[str] = None 1804 1805 1806 class TodoistAddTaskRequest(BaseModel): 1807 project_id: str 1808 content: str 1809 priority: int = Field(default=1, ge=1, le=4) 1810 due_string: Optional[str] = None 1811 1812 1813 # ============================================================================= 1814 # Cross-Browser Tab Transfer Models & State 1815 # ============================================================================= 1816 1817 1818 class TabTransferRequest(BaseModel): 1819 """Request to transfer a tab from one browser to another.""" 1820 source_browser: str 1821 target_browser: str 1822 target_workspace_id: str 1823 target_workspace_name: str 1824 tab: Dict[str, Any] # { url, title, pinned } 1825 
# In-memory queue of pending cross-browser tab transfers, guarded by a lock
# because requests may be served from multiple threads.
_pending_transfers: List[Dict[str, Any]] = []
_transfers_lock = threading.Lock()


@app.get("/todoist/status", tags=["Todoist"])
async def todoist_status():
    """Check Todoist connection status."""
    if not app_state.todoist_token or not TODOIST_SDK_AVAILABLE:
        return {"configured": False}
    svc = get_todoist_service()
    if not svc:
        return {"configured": False}
    try:
        status = await svc.verify_connection()
        return {"configured": True, **status}
    except Exception as e:
        logger.warning("Todoist status check failed: %s", e)
        raise HTTPException(status_code=502, detail=f"Todoist API error: {e}")


@app.post("/todoist/create-project", tags=["Todoist"])
async def todoist_create_project(req: TodoistCreateProjectRequest):
    """Create a Todoist project for a workspace and save the link in workspaces.json."""
    svc = get_todoist_service()
    if not svc:
        raise HTTPException(status_code=400, detail="Todoist not configured")

    try:
        project = await svc.create_project(
            name=req.workspace_name,
            parent_id=req.parent_todoist_project_id,
        )
    except Exception as e:
        logger.error("Todoist create project failed: %s", e)
        raise HTTPException(status_code=502, detail=f"Todoist API error: {e}")

    # Update workspaces.json with the new Todoist project ID
    data_updated = False
    try:
        cio = get_cache_io()
        data, _ = cio.read()
        if data:
            browser_data = data.get("browsers", {}).get(req.browser_key, {})
            workspaces = browser_data.get("workspaces", [])
            for ws in workspaces:
                if ws.get("id") == req.workspace_id:
                    ws["todoistProjectId"] = project["id"]
                    data_updated = True
                    break
            if data_updated:
                cio.write(data)
                logger.info(
                    "Linked workspace %s (%s) to Todoist project %s",
                    req.workspace_name, req.workspace_id, project["id"],
                )
    except Exception as e:
        logger.error("Failed to update workspaces.json with Todoist project ID: %s", e)

    return {
        "todoist_project_id": project["id"],
        "project_name": project["name"],
        "project_url": f"https://todoist.com/app/project/{project['id']}",
        "data_updated": data_updated,
    }


@app.post("/todoist/add-task", tags=["Todoist"])
async def todoist_add_task(req: TodoistAddTaskRequest):
    """Add a task to a Todoist project."""
    svc = get_todoist_service()
    if not svc:
        raise HTTPException(status_code=400, detail="Todoist not configured")

    try:
        task = await svc.add_task(
            content=req.content,
            project_id=req.project_id,
            priority=req.priority,
            due_string=req.due_string,
        )
    except Exception as e:
        logger.error("Todoist add task failed: %s", e)
        raise HTTPException(status_code=502, detail=f"Todoist API error: {e}")

    return task
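# Illustrative two-step flow (hypothetical IDs and names): link a workspace to
# a new Todoist project, then file a task into it via the returned project ID.
# Assumes the `requests` package and the default bind address.
#
#     import requests
#
#     base = "http://127.0.0.1:8765"
#     link = requests.post(f"{base}/todoist/create-project", json={
#         "workspace_id": "ws-1234",
#         "workspace_name": "Research",
#         "browser_key": "firefox",
#     }).json()
#     requests.post(f"{base}/todoist/add-task", json={
#         "project_id": link["todoist_project_id"],
#         "content": "Triage saved tabs",
#         "priority": 2,
#         "due_string": "tomorrow",
#     })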
# =============================================================================
# Cross-Browser Tab Transfer Endpoints
# =============================================================================


@app.post("/transfers", tags=["Transfers"])
async def create_transfer(req: TabTransferRequest):
    """
    Queue a tab transfer from one browser to another.

    The transfer is held in memory with a 5-minute TTL.
    The target browser's extension polls GET /transfers/{browser_key}
    to pick up pending transfers.
    """
    transfer = {
        "id": str(uuid.uuid4()),
        "source_browser": req.source_browser,
        "target_browser": req.target_browser,
        "target_workspace_id": req.target_workspace_id,
        "target_workspace_name": req.target_workspace_name,
        "tab": req.tab,
        "created_at": datetime.now(timezone.utc).isoformat(),
    }
    with _transfers_lock:
        _pending_transfers.append(transfer)
    logger.info(
        "Tab transfer queued: %s → %s (workspace: %s, url: %s)",
        req.source_browser, req.target_browser,
        req.target_workspace_name, req.tab.get("url", "?"),
    )
    return {"success": True, "transfer_id": transfer["id"]}


@app.get("/transfers/{browser_key}", tags=["Transfers"])
async def poll_transfers(browser_key: str):
    """
    Fetch and clear all pending transfers for a specific browser.

    Expired transfers (>5 min old) are pruned automatically.
    Returned transfers are removed from the queue (fetch-and-clear pattern),
    so each transfer is delivered at most once.
    """
    now = datetime.now(timezone.utc)
    ttl = timedelta(minutes=5)
    matched = []
    remaining = []
    with _transfers_lock:
        for t in _pending_transfers:
            created = datetime.fromisoformat(t["created_at"])
            if now - created > ttl:
                continue  # expired - drop regardless of target
            if t["target_browser"] == browser_key:
                matched.append(t)
            else:
                remaining.append(t)
        _pending_transfers.clear()
        _pending_transfers.extend(remaining)
    if matched:
        logger.info("Delivering %d transfer(s) to %s", len(matched), browser_key)
    return {"transfers": matched}


# =============================================================================
# Main Entry Point
# =============================================================================


def parse_args():
    """Parse command line arguments."""
    parser = argparse.ArgumentParser(
        description="Mnemonic Workspaces Bridge - REST API Service",
        formatter_class=argparse.RawDescriptionHelpFormatter,
        epilog="""
Examples:
  python workspaces-bridge.py
  python workspaces-bridge.py --port 9000
  python workspaces-bridge.py --config-dir "C:\\Users\\Me\\MyWorkspaces"
  python workspaces-bridge.py --dashboard-host 0.0.0.0

The service will be available at http://127.0.0.1:8765 by default.
Configure your browser extensions to connect to this URL.

The web dashboard is disabled by default. Use --dashboard-host to enable
it and make it accessible on the LAN (e.g. from your phone).
""",
    )
    parser.add_argument(
        "--host",
        default=DEFAULT_HOST,
        help=f"Host to bind to (default: {DEFAULT_HOST})",
    )
    parser.add_argument(
        "--port",
        type=int,
        default=DEFAULT_PORT,
        help=f"Port to bind to (default: {DEFAULT_PORT})",
    )
    parser.add_argument(
        "--config-dir",
        type=Path,
        default=DEFAULT_CONFIG_DIR,
        help=f"Configuration directory (default: {DEFAULT_CONFIG_DIR})",
    )
    parser.add_argument(
        "--dashboard-host",
        default=None,
        help="Enable web dashboard and bind to this address (e.g. 0.0.0.0 for LAN access). "
        "Dashboard is disabled when not specified.",
    )
    parser.add_argument(
        "--todoist-token",
        default=None,
        help="Todoist API token. Can also be set via TODOIST_API_TOKEN env var.",
    )
    return parser.parse_args()


def main():
    """Main entry point."""
    args = parse_args()

    # Update app state with the config directory
    app_state.config_dir = args.config_dir
    app_state.config_dir.mkdir(parents=True, exist_ok=True)

    # Import uvicorn here to avoid import errors if not installed
    try:
        import uvicorn
    except ImportError:
        logger.error("uvicorn is required. Install it with: pip install uvicorn")
        return 1

    # Enable dashboard if --dashboard-host was specified
    global _dashboard_enabled
    if args.dashboard_host is not None:
        _dashboard_enabled = True
        _init_dashboard_templates()
        # When the dashboard is enabled, bind to dashboard-host for LAN access
        bind_host = args.dashboard_host
    else:
        bind_host = args.host

    # Resolve Todoist token: CLI arg takes precedence over the env var
    todoist_token = args.todoist_token or os.environ.get("TODOIST_API_TOKEN")
    if todoist_token:
        app_state.todoist_token = todoist_token

    setup_file_logging(app_state.config_dir)
    logger.info("Starting on http://%s:%s", bind_host, args.port)
    logger.info("Config directory: %s", app_state.config_dir)
    if _dashboard_enabled:
        logger.info("Dashboard: http://%s:%s/dashboard", bind_host, args.port)
    else:
        logger.info("Dashboard: disabled (use --dashboard-host to enable)")
    if app_state.todoist_token:
        if TODOIST_SDK_AVAILABLE:
            logger.info("Todoist: configured (token set)")
        else:
            logger.warning("Todoist: token set but todoist-api-python not installed")
    else:
        logger.info("Todoist: disabled (no token)")
    logger.info("Press Ctrl+C to stop")

    uvicorn.run(
        app,
        host=bind_host,
        port=args.port,
        log_level="info",
    )


if __name__ == "__main__":
    # Propagate main()'s return value as the process exit code
    raise SystemExit(main())
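# Illustrative polling loop for the transfer queue (client side, not part of
# this service). The `requests` package, the 10-second interval, the "chrome"
# browser key, and the open_tab() helper are all assumptions for the sketch.
# Because GET /transfers/{browser_key} is fetch-and-clear, each transfer is
# delivered at most once even across repeated polls.
#
#     import time
#     import requests
#
#     while True:
#         resp = requests.get("http://127.0.0.1:8765/transfers/chrome").json()
#         for t in resp["transfers"]:
#             open_tab(t["tab"]["url"], workspace_id=t["target_workspace_id"])
#         time.sleep(10)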