// __quarantined_tests__/cron/cron-coverage-supplement.test.js
/**
 * Tests for src/cron.js — coverage supplement targeting uncovered HANDLERS code paths.
 *
 * Covers:
 *  1. backupDatabase — with temp backup dir (lines 497-768): full backup body
 *  2. analyzePerformance — empty tables (missing ANALYZE stats path, lines 951-979)
 *  3. checkKeywords — with seeded data (lines 171-201)
 *  4. purgeSiteStatusHistory — with overflow data (lines 1066-1109)
 *  5. checkRateLimits — with missing env vars (lines 1038-1064)
 *  6. unifiedAutofix — error path (lines 1298-1312): script doesn't exist
 *  7. logTaskComplete — items_processed field (lines 1569-1570)
 *  8. logTaskComplete — metrics.processed field (line 1568)
 *  9. executeCommand — via command-type job via runCron
 * 10. analyzePerformance — full structure returned correctly
 *
 * NOTE: requires --experimental-test-module-mocks flag
 */
  18  
  19  import { test, describe, mock, after } from 'node:test';
  20  import assert from 'node:assert/strict';
  21  import Database from 'better-sqlite3';
  22  import { join, dirname } from 'path';
  23  import { tmpdir } from 'os';
  24  import { existsSync, unlinkSync, mkdirSync, rmSync, readdirSync, lstatSync } from 'fs';
  25  import { fileURLToPath } from 'url';
  26  
  27  const __filename = fileURLToPath(import.meta.url);
  28  const __dirname = dirname(__filename);
  29  
  30  const TEST_DB = join(tmpdir(), `test-cron-supplement-${Date.now()}.db`);
  31  const OPS_DB = join(tmpdir(), `test-cron-supplement-ops-${Date.now()}.db`);
  32  const TEL_DB = join(tmpdir(), `test-cron-supplement-tel-${Date.now()}.db`);
  33  process.env.DATABASE_PATH = TEST_DB;
  34  process.env.OPS_DB_PATH = OPS_DB;
  35  process.env.TEL_DB_PATH = TEL_DB;
  36  process.env.NODE_ENV = 'test';
  37  
  38  const PROJECT_ROOT = join(__dirname, '..', '..');
  39  
  40  // ── Temp backup dir: isolated from real db/backup ────────────────────────────
  41  // We use a temp dir inside /tmp, then override process.env so backupDatabase
  42  // reads it. But backupDatabase uses projectRoot which is hardcoded. So instead
  43  // we create the real db/backup dir temporarily (same approach as boost test).
  44  const REAL_BACKUP_DIR = join(PROJECT_ROOT, 'db', 'backup');
  45  
  46  // ── Mock all external dependencies BEFORE importing cron.js ──────────────────
  47  
  48  mock.module('../../src/utils/sync-email-events.js', {
  49    namedExports: { syncEmailEvents: async () => ({ synced: 2, errors: 0 }) },
  50  });
  51  mock.module('../../src/utils/sync-unsubscribes.js', {
  52    namedExports: { syncUnsubscribes: async () => ({ synced: 1, errors: 0 }) },
  53  });
  54  mock.module('../../src/inbound/sms.js', {
  55    namedExports: {
  56      pollInboundSMS: async () => ({ processed: 0, new_messages: 0 }),
  57      setupWebhookServer: async () => {},
  58    },
  59  });
  60  mock.module('../../src/inbound/email.js', {
  61    namedExports: { pollInboundEmails: async () => ({ processed: 0, stored: 0, unmatched: 0 }) },
  62  });
  63  mock.module('../../src/inbound/processor.js', {
  64    namedExports: { processAllReplies: async () => ({ sms: { sent: 0 }, email: { sent: 0 } }) },
  65  });
  66  mock.module('../../src/cron/poll-free-scans.js', {
  67    namedExports: { pollFreeScans: async () => ({ processed: 0, inserted: 0, failed: 0 }) },
  68  });
  69  mock.module('../../src/cron/poll-purchases.js', {
  70    namedExports: { pollPurchases: async () => ({ processed: 0, successful: 0 }) },
  71  });
  72  mock.module('../../src/cron/process-purchases.js', {
  73    namedExports: {
  74      processPendingPurchases: async () => ({ processed: 0, delivered: 0, failed: 0 }),
  75    },
  76  });
  77  mock.module('../../src/cron/precompute-dashboard.js', {
  78    namedExports: {
  79      precomputeDashboard: async () => ({ summary: 'ok', details: {}, metrics: {} }),
  80    },
  81  });
  82  mock.module('../../src/cron/process-guardian.js', {
  83    namedExports: {
  84      runProcessGuardian: async () => ({
  85        checks_run: 1,
  86        ok: 1,
  87        warnings: 0,
  88        critical: 0,
  89        duration_seconds: 0.01,
  90        results: [],
  91      }),
  92    },
  93  });
  94  mock.module('../../src/cron/process-reaper.js', {
  95    namedExports: {
  96      runProcessReaper: async () => ({
  97        zombie_count: 0,
  98        free_mem_mb: 512,
  99        swap_pct: 0,
 100        stale_processes_killed: 0,
 101        duration_seconds: 0.01,
 102      }),
 103    },
 104  });
 105  mock.module('../../src/cron/cleanup-test-dbs.js', {
 106    namedExports: { runCleanupTestDbs: () => ({ deleted: 0, freed_kb: 0 }) },
 107  });
 108  mock.module('../../src/cron/pipeline-status-monitor.js', {
 109    namedExports: {
 110      runPipelineStatusMonitor: async () => ({
 111        summary: 'Pipeline ok',
 112        checks_run: 1,
 113        duration_seconds: 0.01,
 114        actions: [],
 115      }),
 116    },
 117  });
 118  mock.module('../../src/cron/classify-unknown-errors.js', {
 119    namedExports: {
 120      classifyUnknownErrors: async () => ({
 121        sites_retried: 0,
 122        outreaches_retried: 0,
 123        patterns_applied: 0,
 124      }),
 125    },
 126  });
 127  mock.module('../../src/agents/utils/task-manager.js', {
 128    namedExports: {
 129      createAgentTask: async () => 1,
 130      findDuplicateTask: async () => null,
 131    },
 132  });
 133  mock.module('../../src/utils/log-rotator.js', {
 134    namedExports: { rotateLogs: () => ({ deleted: 0, kept: 2, freedSpace: 0 }) },
 135  });
 136  mock.module('../../src/utils/rate-limit-scheduler.js', {
 137    namedExports: {
 138      getSkipStages: () => new Set(),
 139      getRateLimitStatus: () => [],
 140      setRateLimit: () => {},
 141    },
 142  });
 143  mock.module('../../src/utils/load-env.js', { namedExports: {} });
 144  
 145  // ── Create DB schema before import ────────────────────────────────────────────
 146  // ops tables live in ATTACHed ops.db, tel tables in telemetry.db
 147  
 148  {
 149    // -- ops.db --
 150    const opsDb = new Database(OPS_DB);
 151    opsDb.pragma('journal_mode = WAL');
 152    opsDb.exec(`
 153      CREATE TABLE IF NOT EXISTS settings (
 154        key TEXT PRIMARY KEY,
 155        value TEXT,
 156        description TEXT,
 157        updated_at TEXT DEFAULT CURRENT_TIMESTAMP
 158      );
 159      CREATE TABLE IF NOT EXISTS cron_jobs (
 160        id INTEGER PRIMARY KEY AUTOINCREMENT,
 161        name TEXT NOT NULL UNIQUE,
 162        task_key TEXT NOT NULL UNIQUE,
 163        description TEXT,
 164        enabled INTEGER NOT NULL DEFAULT 1,
 165        handler_type TEXT NOT NULL DEFAULT 'function',
 166        handler_value TEXT,
 167        interval_value INTEGER NOT NULL DEFAULT 5,
 168        interval_unit TEXT NOT NULL DEFAULT 'minutes',
 169        timeout_seconds INTEGER,
 170        priority INTEGER DEFAULT 5,
 171        critical INTEGER DEFAULT 1,
 172        last_run_at TEXT,
 173        created_at TEXT DEFAULT (datetime('now')),
 174        updated_at TEXT DEFAULT (datetime('now'))
 175      );
 176      CREATE TABLE IF NOT EXISTS cron_job_logs (
 177        id INTEGER PRIMARY KEY AUTOINCREMENT,
 178        job_name TEXT NOT NULL,
 179        started_at TEXT NOT NULL DEFAULT (datetime('now')),
 180        finished_at TEXT,
 181        status TEXT NOT NULL DEFAULT 'running',
 182        summary TEXT,
 183        full_log TEXT,
 184        error_message TEXT,
 185        items_processed INTEGER DEFAULT 0,
 186        items_failed INTEGER DEFAULT 0
 187      );
 188      CREATE TABLE IF NOT EXISTS cron_locks (
 189        lock_key TEXT PRIMARY KEY,
 190        description TEXT,
 191        updated_at TEXT DEFAULT (datetime('now'))
 192      );
 193      CREATE TABLE IF NOT EXISTS pipeline_control (key TEXT PRIMARY KEY, value TEXT);
 194    `);
 195    opsDb.close();
 196  
 197    // -- telemetry.db --
 198    const telDb = new Database(TEL_DB);
 199    telDb.pragma('journal_mode = WAL');
 200    telDb.exec(`
 201      CREATE TABLE IF NOT EXISTS agent_tasks (
 202        id INTEGER PRIMARY KEY AUTOINCREMENT,
 203        task_type TEXT NOT NULL,
 204        assigned_to TEXT,
 205        priority INTEGER DEFAULT 5,
 206        status TEXT DEFAULT 'pending',
 207        context_json TEXT,
 208        result_json TEXT,
 209        error_message TEXT,
 210        created_at TEXT DEFAULT (datetime('now')),
 211        updated_at TEXT DEFAULT (datetime('now'))
 212      );
 213      CREATE TABLE IF NOT EXISTS llm_usage (
 214        id INTEGER PRIMARY KEY AUTOINCREMENT,
 215        site_id INTEGER,
 216        stage TEXT NOT NULL,
 217        provider TEXT NOT NULL DEFAULT 'openrouter',
 218        model TEXT NOT NULL DEFAULT 'unknown',
 219        prompt_tokens INTEGER NOT NULL DEFAULT 0,
 220        completion_tokens INTEGER NOT NULL DEFAULT 0,
 221        total_tokens INTEGER NOT NULL DEFAULT 0,
 222        estimated_cost DECIMAL(10, 6),
 223        created_at DATETIME DEFAULT CURRENT_TIMESTAMP
 224      );
 225    `);
 226    telDb.close();
 227  
 228    // -- main sites.db --
 229    const db = new Database(TEST_DB);
 230    db.pragma('journal_mode = WAL');
 231    db.exec(`
 232      CREATE TABLE IF NOT EXISTS messages (
 233        id INTEGER PRIMARY KEY AUTOINCREMENT,
 234        site_id INTEGER,
 235        direction TEXT NOT NULL DEFAULT 'outbound',
 236        approval_status TEXT,
 237        delivery_status TEXT,
 238        contact_method TEXT,
 239        created_at TEXT DEFAULT CURRENT_TIMESTAMP,
 240        updated_at TEXT DEFAULT CURRENT_TIMESTAMP
 241      );
 242      CREATE TABLE IF NOT EXISTS sites (
 243        id INTEGER PRIMARY KEY AUTOINCREMENT,
 244        domain TEXT NOT NULL DEFAULT 'test.com',
 245        status TEXT DEFAULT 'found',
 246        score REAL,
 247        error_message TEXT,
 248        updated_at TEXT DEFAULT CURRENT_TIMESTAMP
 249      );
 250      CREATE TABLE IF NOT EXISTS keywords (
 251        id INTEGER PRIMARY KEY AUTOINCREMENT,
 252        keyword TEXT NOT NULL,
 253        status TEXT DEFAULT 'pending'
 254      );
 255      CREATE TABLE IF NOT EXISTS site_status (
 256        id INTEGER PRIMARY KEY AUTOINCREMENT,
 257        site_id INTEGER NOT NULL,
 258        status TEXT,
 259        created_at TEXT DEFAULT (datetime('now'))
 260      );
 261      CREATE TABLE IF NOT EXISTS human_review_queue (
 262        id INTEGER PRIMARY KEY AUTOINCREMENT,
 263        file TEXT,
 264        reason TEXT,
 265        type TEXT,
 266        priority TEXT DEFAULT 'medium',
 267        status TEXT DEFAULT 'pending',
 268        created_at TEXT DEFAULT (datetime('now')),
 269        reviewed_at TEXT,
 270        reviewed_by TEXT,
 271        notes TEXT
 272      );
 273    `);
 274    db.close();
 275  }
 276  
 277  // Import AFTER mocks + schema
 278  const { default: cronModule } = await import('../../src/cron.js');
 279  
 280  // ── Track whether we created the backup dir ───────────────────────────────────
 281  let createdBackupDir = false;
 282  
 283  after(() => {
 284    for (const f of [TEST_DB, OPS_DB, TEL_DB]) {
 285      if (existsSync(f)) {
 286        try { unlinkSync(f); } catch { /* ignore */ }
 287      }
 288    }
 289    // Clean up backup files we may have written to db/backup/
 290    if (createdBackupDir && existsSync(REAL_BACKUP_DIR)) {
 291      try {
 292        const files = readdirSync(REAL_BACKUP_DIR).filter(
 293          f => f.startsWith('sites-backup-') && f.endsWith('.db')
 294        );
 295        for (const f of files) {
 296          try {
 297            unlinkSync(join(REAL_BACKUP_DIR, f));
 298          } catch {
 299            /* ignore */
 300          }
 301        }
 302        // Only remove the dir if we created it (not a pre-existing real one)
 303        try {
 304          rmSync(REAL_BACKUP_DIR, { recursive: false });
 305        } catch {
 306          /* ignore if non-empty or not ours */
 307        }
 308      } catch {
 309        /* ignore */
 310      }
 311    }
 312  });
 313  
 314  // ── Helpers ───────────────────────────────────────────────────────────────────
 315  
 316  /**
 317   * Open a DB connection with ops tables ATTACHed — matches the runtime layout.
 318   * Unqualified ops table names (cron_jobs, settings, etc.) resolve to the ops schema.
 319   * Main DB tables (sites, keywords, messages) are accessible unqualified from the main schema.
 320   */
 321  function openDb() {
 322    const db = new Database(TEST_DB);
 323    db.exec(`ATTACH DATABASE '${OPS_DB}' AS ops`);
 324    return db;
 325  }
 326  
 327  /** Open only the ops database directly */
 328  function openOpsDb() {
 329    return new Database(OPS_DB);
 330  }
 331  
 332  function clearJobs(db) {
 333    db.prepare('DELETE FROM cron_jobs').run();
 334    db.prepare('DELETE FROM cron_job_logs').run();
 335    db.prepare('DELETE FROM cron_locks').run();
 336  }
 337  
 338  function seedJob(db, overrides = {}) {
 339    const defaults = {
 340      name: 'Test Job',
 341      task_key: 'syncEmailEvents',
 342      enabled: 1,
 343      handler_type: 'function',
 344      handler_value: null,
 345      interval_value: 1,
 346      interval_unit: 'minutes',
 347      timeout_seconds: 30,
 348      critical: 0,
 349      last_run_at: null,
 350    };
 351    const job = { ...defaults, ...overrides };
 352    db.prepare(
 353      `INSERT OR REPLACE INTO cron_jobs
 354       (name, task_key, enabled, handler_type, handler_value, interval_value, interval_unit, timeout_seconds, critical, last_run_at)
 355       VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?)`
 356    ).run(
 357      job.name,
 358      job.task_key,
 359      job.enabled,
 360      job.handler_type,
 361      job.handler_value,
 362      job.interval_value,
 363      job.interval_unit,
 364      job.timeout_seconds,
 365      job.critical,
 366      job.last_run_at
 367    );
 368  }
 369  
// ── Tests: backupDatabase with real backup dir ────────────────────────────────
//
// db/backup is a symlink to an external drive that may not be mounted.
// lstatSync detects symlinks; existsSync returns false for dangling symlinks.
// Strategy:
//   - If REAL_BACKUP_DIR is accessible (exists + not a dangling symlink): run backup, assert success/aborted/skipped.
//   - If REAL_BACKUP_DIR is a dangling symlink or non-existent: create a real temp dir there (only if not a symlink),
//     run backup, assert, then clean up.
//   - If it's a dangling symlink we cannot mkdir it — skip the backup body test and just confirm the skipped result.
 380  function backupDirIsAccessible() {
 381    return existsSync(REAL_BACKUP_DIR);
 382  }
 383  
 384  function backupDirIsDanglingSymlink() {
 385    try {
 386      const stat = lstatSync(REAL_BACKUP_DIR);
 387      if (stat.isSymbolicLink()) {
 388        return !existsSync(REAL_BACKUP_DIR); // dangling if lstat works but existsSync fails
 389      }
 390      return false;
 391    } catch {
 392      return false; // doesn't exist at all
 393    }
 394  }
 395  
 396  describe('HANDLERS.backupDatabase — backup dir present (covers main backup body)', () => {
 397    test('returns structured result when backup dir exists', async () => {
 398      const isDangling = backupDirIsDanglingSymlink();
 399      if (!isDangling && !backupDirIsAccessible()) {
 400        // dir doesn't exist and is not a symlink — create it
 401        mkdirSync(REAL_BACKUP_DIR, { recursive: true });
 402        createdBackupDir = true;
 403      }
 404  
 405      const { HANDLERS } = cronModule;
 406      const result = await HANDLERS.backupDatabase();
 407  
 408      assert.ok(typeof result === 'object', 'should return an object');
 409      assert.ok(typeof result.summary === 'string', 'should have a summary string');
 410      assert.ok(typeof result.metrics === 'object', 'should have metrics object');
 411  
 412      // One of: success, aborted, or skipped (all valid outcomes depending on env)
 413      const isSuccess = result.metrics.success === 1;
 414      const isAborted = result.metrics.aborted === 1;
 415      const isSkipped = result.metrics.skipped === 1;
 416      assert.ok(
 417        isSuccess || isAborted || isSkipped,
 418        `expected success=1, aborted=1, or skipped=1; got: ${JSON.stringify(result.metrics)}`
 419      );
 420    });
 421  
 422    test('successful backup includes rotation details', async () => {
 423      const isDangling = backupDirIsDanglingSymlink();
 424      if (!isDangling && !backupDirIsAccessible()) {
 425        mkdirSync(REAL_BACKUP_DIR, { recursive: true });
 426        createdBackupDir = true;
 427      }
 428  
 429      const { HANDLERS } = cronModule;
 430      const result = await HANDLERS.backupDatabase();
 431  
 432      if (result.metrics.success === 1) {
 433        assert.ok(typeof result.details === 'object', 'details should be an object');
 434        assert.ok('gz_path' in result.details, 'details should have gz_path');
 435        assert.ok('tier' in result.details, 'details should have tier');
 436        assert.ok('site_count' in result.details, 'details should have site_count');
 437        assert.ok('rotation' in result.details, 'details should have rotation');
 438        assert.ok(typeof result.metrics.size_mb === 'number', 'metrics.size_mb should be a number');
 439        assert.ok(
 440          typeof result.metrics.backups_kept === 'number',
 441          'metrics.backups_kept should be a number'
 442        );
 443      }
 444      // Non-success paths are valid too; just confirm summary is set
 445      assert.ok(result.summary.length > 0, 'summary should be non-empty');
 446    });
 447  });
 448  
 449  // ── Tests: analyzePerformance ─────────────────────────────────────────────────
 450  
 451  describe('HANDLERS.analyzePerformance — structure and missing-ANALYZE path', () => {
 452    test('returns structured result with tables_analyzed, index_count, recommendations_found', async () => {
 453      const { HANDLERS } = cronModule;
 454      const result = await HANDLERS.analyzePerformance();
 455  
 456      assert.ok(typeof result === 'object', 'should return an object');
 457      assert.ok(typeof result.summary === 'string', 'should have a summary string');
 458      assert.ok(typeof result.metrics === 'object', 'should have a metrics object');
 459      assert.ok('tables_analyzed' in result.metrics, 'metrics should have tables_analyzed');
 460      assert.ok('index_count' in result.metrics, 'metrics should have index_count');
 461      assert.ok(
 462        'recommendations_found' in result.metrics,
 463        'metrics should have recommendations_found'
 464      );
 465      assert.ok('new_recommendations' in result.metrics, 'metrics should have new_recommendations');
 466    });
 467  
 468    test('recommendations_found >= 0 (missing ANALYZE path fires on fresh DB)', async () => {
 469      // A fresh test DB has no sqlite_stat1 table, so the "missing ANALYZE stats" path triggers.
 470      const { HANDLERS } = cronModule;
 471      const result = await HANDLERS.analyzePerformance();
 472      assert.ok(
 473        result.metrics.recommendations_found >= 0,
 474        `recommendations_found should be non-negative: ${result.metrics.recommendations_found}`
 475      );
 476    });
 477  
 478    test('details contains table_stats and recommendations arrays', async () => {
 479      const { HANDLERS } = cronModule;
 480      const result = await HANDLERS.analyzePerformance();
 481  
 482      assert.ok(Array.isArray(result.details.table_stats), 'details.table_stats should be array');
 483      assert.ok(
 484        Array.isArray(result.details.recommendations),
 485        'details.recommendations should be array'
 486      );
 487      assert.ok(
 488        typeof result.details.index_count === 'number',
 489        'details.index_count should be a number'
 490      );
 491    });
 492  
 493    test('missing ANALYZE recommendation fires when no sqlite_stat1 exists', async () => {
 494      // On a brand-new test DB, sqlite_stat1 will not exist OR will have no rows.
 495      // Either path fires the missing-ANALYZE recommendation.
 496      const { HANDLERS } = cronModule;
 497      const result = await HANDLERS.analyzePerformance();
 498  
 499      // The recommendation for missing ANALYZE should be in the list
 500      const missingAnalyzeRec = result.details.recommendations.find(
 501        r => r.file === 'database:missing_analyze'
 502      );
 503      assert.ok(
 504        missingAnalyzeRec !== undefined,
 505        'should have a missing_analyze recommendation on a fresh DB'
 506      );
 507      assert.equal(missingAnalyzeRec.priority, 'low', 'missing_analyze should have low priority');
 508    });
 509  });
 510  
 511  // ── Tests: checkKeywords with seeded data ─────────────────────────────────────
 512  
 513  describe('HANDLERS.checkKeywords — with seeded keyword data', () => {
 514    test('returns correct pending and active counts matching seeded rows', async () => {
 515      // Seed keywords: 3 pending, 2 active
 516      const db = openDb();
 517      db.prepare('DELETE FROM keywords').run();
 518      db.prepare("INSERT INTO keywords (keyword, status) VALUES ('kw1', 'pending')").run();
 519      db.prepare("INSERT INTO keywords (keyword, status) VALUES ('kw2', 'pending')").run();
 520      db.prepare("INSERT INTO keywords (keyword, status) VALUES ('kw3', 'pending')").run();
 521      db.prepare("INSERT INTO keywords (keyword, status) VALUES ('kw4', 'active')").run();
 522      db.prepare("INSERT INTO keywords (keyword, status) VALUES ('kw5', 'active')").run();
 523      db.close();
 524  
 525      const { HANDLERS } = cronModule;
 526      const result = await HANDLERS.checkKeywords();
 527  
 528      assert.ok(typeof result === 'object', 'should return object');
 529      assert.ok(typeof result.summary === 'string', 'should have summary string');
 530      assert.ok(typeof result.metrics === 'object', 'should have metrics');
 531      assert.equal(result.metrics.pending, 3, 'metrics.pending should be 3');
 532      assert.equal(result.metrics.active, 2, 'metrics.active should be 2');
 533      assert.equal(result.metrics.total, 5, 'metrics.total should be 5');
 534    });
 535  
 536    test('summary mentions pending count', async () => {
 537      const { HANDLERS } = cronModule;
 538      const result = await HANDLERS.checkKeywords();
 539      assert.ok(
 540        result.summary.includes('pending'),
 541        `summary should mention "pending": ${result.summary}`
 542      );
 543    });
 544  
 545    test('works with empty keywords table (zero counts)', async () => {
 546      const db = openDb();
 547      db.prepare('DELETE FROM keywords').run();
 548      db.close();
 549  
 550      const { HANDLERS } = cronModule;
 551      const result = await HANDLERS.checkKeywords();
 552  
 553      assert.equal(result.metrics.pending, 0, 'should report 0 pending keywords');
 554      assert.equal(result.metrics.active, 0, 'should report 0 active keywords');
 555      assert.equal(result.metrics.total, 0, 'should report 0 total keywords');
 556    });
 557  });
 558  
 559  // ── Tests: purgeSiteStatusHistory with overflow data ─────────────────────────
 560  
 561  describe('HANDLERS.purgeSiteStatusHistory — with overflow site_status rows', () => {
 562    test('deletes excess rows (>5 per site) and reports rows_deleted', async () => {
 563      const db = openDb();
 564  
 565      // Ensure we have a site to reference
 566      db.prepare('DELETE FROM site_status').run();
 567      const siteResult = db
 568        .prepare("INSERT OR IGNORE INTO sites (domain, status) VALUES ('purgetest.com', 'found')")
 569        .run();
 570      const siteId =
 571        siteResult.lastInsertRowid ||
 572        db.prepare("SELECT id FROM sites WHERE domain='purgetest.com'").get().id;
 573  
 574      // Insert 10 site_status rows for this site (handler keeps only 5)
 575      for (let i = 0; i < 10; i++) {
 576        db.prepare(
 577          "INSERT INTO site_status (site_id, status, created_at) VALUES (?, ?, datetime('now', '+' || ? || ' seconds'))"
 578        ).run(siteId, 'found', String(i));
 579      }
 580  
 581      const beforeCount = db.prepare('SELECT COUNT(*) as n FROM site_status').get().n;
 582      db.close();
 583  
 584      assert.equal(beforeCount, 10, 'should have 10 rows before purge');
 585  
 586      const { HANDLERS } = cronModule;
 587      const result = await HANDLERS.purgeSiteStatusHistory();
 588  
 589      assert.ok(typeof result === 'object', 'should return object');
 590      assert.ok(typeof result.summary === 'string', 'should have summary');
 591      assert.ok(typeof result.metrics === 'object', 'should have metrics');
 592      assert.ok('rows_deleted' in result.metrics, 'metrics should have rows_deleted');
 593      assert.ok('rows_remaining' in result.metrics, 'metrics should have rows_remaining');
 594      assert.ok(result.metrics.rows_deleted >= 0, 'rows_deleted should be non-negative');
 595    });
 596  
 597    test('rows_deleted > 0 when more than 5 rows exist for a single site', async () => {
 598      const db = openDb();
 599      db.prepare('DELETE FROM site_status').run();
 600  
 601      // Use site id 1 (already inserted above or insert fresh)
 602      db.prepare(
 603        "INSERT OR IGNORE INTO sites (id, domain, status) VALUES (999, 'purge2.com', 'found')"
 604      ).run();
 605      for (let i = 0; i < 8; i++) {
 606        db.prepare("INSERT INTO site_status (site_id, status) VALUES (999, 'found')").run();
 607      }
 608      db.close();
 609  
 610      const { HANDLERS } = cronModule;
 611      const result = await HANDLERS.purgeSiteStatusHistory();
 612  
 613      // 8 rows for one site: should delete 3 (keep 5)
 614      assert.ok(
 615        result.metrics.rows_deleted >= 3,
 616        `expected >= 3 rows deleted, got ${result.metrics.rows_deleted}`
 617      );
 618    });
 619  
 620    test('no rows deleted when site has 5 or fewer status rows', async () => {
 621      const db = openDb();
 622      db.prepare('DELETE FROM site_status').run();
 623      db.prepare(
 624        "INSERT OR IGNORE INTO sites (id, domain, status) VALUES (888, 'nodrop.com', 'found')"
 625      ).run();
 626      for (let i = 0; i < 3; i++) {
 627        db.prepare("INSERT INTO site_status (site_id, status) VALUES (888, 'found')").run();
 628      }
 629      db.close();
 630  
 631      const { HANDLERS } = cronModule;
 632      const result = await HANDLERS.purgeSiteStatusHistory();
 633  
 634      assert.equal(result.metrics.rows_deleted, 0, 'should not delete rows when only 3 exist');
 635    });
 636  });
 637  
 638  // ── Tests: checkRateLimits with missing env vars ──────────────────────────────
 639  
 640  describe('HANDLERS.checkRateLimits — API key presence detection', () => {
 641    test('reports missing keys when API keys are unset', async () => {
 642      // Save originals
 643      const savedZenrows = process.env.ZENROWS_API_KEY;
 644      const savedOpenrouter = process.env.OPENROUTER_API_KEY;
 645      const savedResend = process.env.RESEND_API_KEY;
 646      const savedTwilio = process.env.TWILIO_ACCOUNT_SID;
 647  
 648      // Remove all API keys
 649      delete process.env.ZENROWS_API_KEY;
 650      delete process.env.OPENROUTER_API_KEY;
 651      delete process.env.RESEND_API_KEY;
 652      delete process.env.TWILIO_ACCOUNT_SID;
 653  
 654      const { HANDLERS } = cronModule;
 655      const result = await HANDLERS.checkRateLimits();
 656  
 657      // Restore
 658      if (savedZenrows !== undefined) process.env.ZENROWS_API_KEY = savedZenrows;
 659      if (savedOpenrouter !== undefined) process.env.OPENROUTER_API_KEY = savedOpenrouter;
 660      if (savedResend !== undefined) process.env.RESEND_API_KEY = savedResend;
 661      if (savedTwilio !== undefined) process.env.TWILIO_ACCOUNT_SID = savedTwilio;
 662  
 663      assert.ok(typeof result === 'object', 'should return object');
 664      assert.ok(typeof result.summary === 'string', 'should have summary');
 665      assert.ok('missing' in result.metrics, 'metrics should have missing');
 666      assert.ok('configured' in result.metrics, 'metrics should have configured');
 667      assert.ok(
 668        result.metrics.missing >= 1,
 669        `should report at least 1 missing key; got ${result.metrics.missing}`
 670      );
 671    });
 672  
 673    test('reports configured keys when API keys are set', async () => {
 674      // Save originals
 675      const savedZenrows = process.env.ZENROWS_API_KEY;
 676      const savedOpenrouter = process.env.OPENROUTER_API_KEY;
 677      const savedResend = process.env.RESEND_API_KEY;
 678      const savedTwilio = process.env.TWILIO_ACCOUNT_SID;
 679  
 680      // Set all API keys
 681      process.env.ZENROWS_API_KEY = 'test-zenrows-key';
 682      process.env.OPENROUTER_API_KEY = 'test-openrouter-key';
 683      process.env.RESEND_API_KEY = 'test-resend-key';
 684      process.env.TWILIO_ACCOUNT_SID = 'test-twilio-sid';
 685  
 686      const { HANDLERS } = cronModule;
 687      const result = await HANDLERS.checkRateLimits();
 688  
 689      // Restore
 690      if (savedZenrows !== undefined) process.env.ZENROWS_API_KEY = savedZenrows;
 691      else delete process.env.ZENROWS_API_KEY;
 692      if (savedOpenrouter !== undefined) process.env.OPENROUTER_API_KEY = savedOpenrouter;
 693      else delete process.env.OPENROUTER_API_KEY;
 694      if (savedResend !== undefined) process.env.RESEND_API_KEY = savedResend;
 695      else delete process.env.RESEND_API_KEY;
 696      if (savedTwilio !== undefined) process.env.TWILIO_ACCOUNT_SID = savedTwilio;
 697      else delete process.env.TWILIO_ACCOUNT_SID;
 698  
 699      assert.equal(result.metrics.configured, 4, 'should report 4 configured keys');
 700      assert.equal(result.metrics.missing, 0, 'should report 0 missing keys');
 701    });
 702  
 703    test('summary string reflects configured vs missing counts', async () => {
 704      const { HANDLERS } = cronModule;
 705      const result = await HANDLERS.checkRateLimits();
 706      // Summary format: "API keys: N configured, M missing"
 707      assert.ok(result.summary.includes('configured'), 'summary should mention configured');
 708      assert.ok(result.summary.includes('missing'), 'summary should mention missing');
 709    });
 710  });
 711  
 712  // ── Tests: unifiedAutofix error path ─────────────────────────────────────────
 713  
 714  describe('HANDLERS.unifiedAutofix — error path (script not found)', () => {
 715    test('returns error result when scripts/unified-autofix.js does not exist', async () => {
 716      // The unified-autofix script may or may not exist in test env.
 717      // Either way: if it fails (missing or erroring), the catch block runs.
 718      // If it succeeds, we get the success shape.
 719      const { HANDLERS } = cronModule;
 720      const result = await HANDLERS.unifiedAutofix();
 721  
 722      assert.ok(typeof result === 'object', 'should return object regardless of success/failure');
 723      assert.ok(typeof result.summary === 'string', 'should always have summary');
 724      assert.ok(typeof result.metrics === 'object', 'should always have metrics');
 725  
 726      if (result.metrics.success === 0) {
 727        // Error path (lines 1298-1312)
 728        assert.equal(result.metrics.failed, 1, 'failed metric should be 1 on error');
 729        assert.ok(typeof result.details.error === 'string', 'details should have error string');
 730        assert.ok(
 731          result.summary.includes('failed') || result.summary.includes('fail'),
 732          `summary should mention failure: ${result.summary}`
 733        );
 734      } else {
 735        // Success path (script ran without error)
 736        assert.equal(result.metrics.success, 1, 'success metric should be 1 on success');
 737      }
 738    });
 739  });
 740  
// ── Tests: logTaskComplete with items_processed field ─────────────────────────
//
// logTaskComplete priority: metrics.processed > items_processed > processed > default 1
// Ideally we would exercise a handler returning { items_processed: N } at the top level
// (not under metrics), but none of the standard HANDLERS return that shape. Instead we
// run two function-type jobs via runCron:
//   1. cleanupTestDbs — returns no processed count in any form, so items_processed
//      falls through every branch to the default of 1.
//   2. pollFreeScans — its HANDLER wraps the mocked result under metrics, producing
//      metrics.processed = 0 and covering the metrics.processed branch (line 1568).
 751  
 752  describe('logTaskComplete — items_processed and metrics.processed field branches', () => {
 753    test('items_processed=1 default when handler returns {summary, details, metrics:{deleted}} shape', async () => {
 754      // cleanupTestDbs returns { summary, details, metrics: { deleted, freed_kb } }
 755      // metrics.processed = undefined → items_processed = undefined → processed = undefined → default 1
 756      const db = openDb();
 757      clearJobs(db);
 758      seedJob(db, {
 759        name: 'CleanupSupplement',
 760        task_key: 'cleanupTestDbs',
 761        enabled: 1,
 762        handler_type: 'function',
 763        last_run_at: null,
 764        critical: 0,
 765      });
 766      db.close();
 767  
 768      const { runCron } = cronModule;
 769      await assert.doesNotReject(() => runCron());
 770  
 771      const db2 = openDb();
 772      const log = db2
 773        .prepare(
 774          "SELECT * FROM cron_job_logs WHERE job_name = 'CleanupSupplement' ORDER BY id DESC LIMIT 1"
 775        )
 776        .get();
 777      db2.close();
 778  
 779      assert.ok(log, 'should have a log entry');
 780      assert.equal(log.status, 'success', 'cleanupTestDbs should succeed');
 781      // Default: items_processed = 1 (no .metrics.processed, no .items_processed, no .processed)
 782      assert.equal(log.items_processed, 1, 'items_processed should default to 1');
 783    });
 784  
 785    test('items_processed reflects metrics.processed when handler returns it', async () => {
 786      // processReaper returns { summary, details, metrics: { zombie_count, free_mem_mb, ... } }
 787      // metrics.processed = undefined → falls to items_processed branch → undefined → falls to processed → undefined → default 1
 788      // syncEmailEvents returns { summary, metrics: { synced, errors } } → same default path
 789      //
 790      // pollFreeScans via mock returns { processed: 0, inserted: 0, failed: 0 }
 791      // The HANDLER wraps it: { summary, details, metrics: { processed: 0, inserted: 0, failed: 0 } }
 792      // → metrics.processed = 0 → hits line 1568!
 793      const db = openDb();
 794      clearJobs(db);
 795      seedJob(db, {
 796        name: 'PollFreeScansSupp',
 797        task_key: 'pollFreeScans',
 798        enabled: 1,
 799        handler_type: 'function',
 800        last_run_at: null,
 801        critical: 0,
 802      });
 803      db.close();
 804  
 805      const { runCron } = cronModule;
 806      await assert.doesNotReject(() => runCron());
 807  
 808      const db2 = openDb();
 809      const log = db2
 810        .prepare(
 811          "SELECT * FROM cron_job_logs WHERE job_name = 'PollFreeScansSupp' ORDER BY id DESC LIMIT 1"
 812        )
 813        .get();
 814      db2.close();
 815  
 816      assert.ok(log, 'should have a log entry for pollFreeScans');
 817      assert.equal(log.status, 'success', 'pollFreeScans should succeed');
 818      // metrics.processed = 0 (from mock), so items_processed should be 0
 819      assert.equal(log.items_processed, 0, 'items_processed should equal metrics.processed (0)');
 820    });
 821  });
 822  
 823  // ── Tests: executeCommand via runCron command-type job ────────────────────────
 824  
 825  describe('executeCommand — via runCron command-type job', () => {
 826    test('runs echo command and records success log', async () => {
 827      const db = openDb();
 828      clearJobs(db);
 829      seedJob(db, {
 830        name: 'EchoSupp',
 831        task_key: 'echoSupp',
 832        enabled: 1,
 833        handler_type: 'command',
 834        handler_value: 'echo hello-supplement',
 835        last_run_at: null,
 836        timeout_seconds: 15,
 837        critical: 0,
 838      });
 839      db.close();
 840  
 841      const { runCron } = cronModule;
 842      await assert.doesNotReject(() => runCron());
 843  
 844      const db2 = openDb();
 845      const log = db2
 846        .prepare("SELECT * FROM cron_job_logs WHERE job_name = 'EchoSupp' ORDER BY id DESC LIMIT 1")
 847        .get();
 848      db2.close();
 849  
 850      assert.ok(log, 'should have created a log entry');
 851      assert.equal(log.status, 'success', 'echo command should succeed');
 852      assert.ok(log.summary.includes('completed'), `summary should say completed: ${log.summary}`);
 853    });
 854  
 855    test('command job with pipeline stage name (lock path) runs and completes', async () => {
 856      // Pipeline stage jobs (handler_value contains 'scoring', etc.) get a lock key set via
 857      // isPipelineStage check. We use a safe command that exits 0 quickly.
 858      // We verify the job runs and is logged (success or failure) — we don't assert on
 859      // the lock state since it's cleared async inside the child process exit handler.
 860      const db = openDb();
 861      clearJobs(db);
 862      seedJob(db, {
 863        name: 'ScoringSupp',
 864        task_key: 'scoringSupp',
 865        enabled: 1,
 866        handler_type: 'command',
 867        // Use 'node -e ...' instead of echo to avoid pipeline-stage substring match issues
 868        // 'scoring' appears in handler_value so isPipelineStage = true → lock key is set
 869        handler_value: 'node -e "process.exit(0)" scoring',
 870        last_run_at: null,
 871        timeout_seconds: 15,
 872        critical: 0,
 873      });
 874      db.close();
 875  
 876      const origExit = process.exit;
 877      process.exit = () => {};
 878  
 879      const { runCron } = cronModule;
 880      await assert.doesNotReject(() => runCron());
 881  
 882      process.exit = origExit;
 883  
 884      const db2 = openDb();
 885      const log = db2
 886        .prepare(
 887          "SELECT * FROM cron_job_logs WHERE job_name = 'ScoringSupp' ORDER BY id DESC LIMIT 1"
 888        )
 889        .get();
 890      db2.close();
 891  
 892      assert.ok(log, 'should have a log entry for the pipeline stage job');
 893      // Job should have a status (success or failed depending on node availability)
 894      assert.ok(
 895        log.status === 'success' || log.status === 'failed',
 896        `log status should be success or failed: ${log.status}`
 897      );
 898    });
 899  
 900    test('failed command records error in cron_job_logs', async () => {
 901      const db = openDb();
 902      clearJobs(db);
 903      seedJob(db, {
 904        name: 'FailingSupp',
 905        task_key: 'failingSupp',
 906        enabled: 1,
 907        handler_type: 'command',
 908        // Command that exits non-zero
 909        handler_value: 'false',
 910        last_run_at: null,
 911        timeout_seconds: 10,
 912        critical: 0,
 913      });
 914      db.close();
 915  
 916      const origExit = process.exit;
 917      process.exit = () => {};
 918  
 919      const { runCron } = cronModule;
 920      await assert.doesNotReject(() => runCron());
 921  
 922      process.exit = origExit;
 923  
 924      const db2 = openDb();
 925      const log = db2
 926        .prepare(
 927          "SELECT * FROM cron_job_logs WHERE job_name = 'FailingSupp' ORDER BY id DESC LIMIT 1"
 928        )
 929        .get();
 930      db2.close();
 931  
 932      assert.ok(log, 'should have a log entry for failing command');
 933      assert.equal(log.status, 'failed', 'command that returns non-zero should be logged as failed');
 934    });
 935  });
 936  
 937  // ── Tests: checkAndClearStaleLock — covered indirectly via runCron ────────────
 938  // (The stale lock clearing is tested in cron-runcron.test.js; here we just
 939  //  verify the handler's own use of DB locks doesn't leave orphans.)
 940  
 941  describe('HANDLERS side effect: no stale locks after handler completes', () => {
 942    test('cron_locks table is clean after running backupDatabase (skipped path)', async () => {
 943      // When backup dir does NOT exist, handler returns early — no lock is set.
 944      // (backupDatabase doesn't use a lock itself; executeCommand does.)
 945      // This test confirms the DB is still usable after handler call.
 946      const { HANDLERS } = cronModule;
 947  
 948      // Make backup dir not exist for this call by temporarily moving it if it was created
 949      // Actually just run backupDatabase and verify DB is consistent.
 950      await assert.doesNotReject(() => HANDLERS.backupDatabase());
 951  
 952      const db = openDb();
 953      // DB should be open and queryable
 954      const lockCount = db.prepare('SELECT COUNT(*) as n FROM cron_locks').get().n;
 955      db.close();
 956      assert.ok(lockCount >= 0, 'DB should be queryable after backupDatabase');
 957    });
 958  });
 959  
 960  // ── Tests: analyzePerformance — fragmentation detection path ──────────────────
 961  
 962  describe('HANDLERS.analyzePerformance — repeated calls (deduplication of recommendations)', () => {
 963    test('second call does not add duplicate recommendations', async () => {
 964      const { HANDLERS } = cronModule;
 965  
 966      // Run twice
 967      const result1 = await HANDLERS.analyzePerformance();
 968      const result2 = await HANDLERS.analyzePerformance();
 969  
 970      // Second call should find 0 new recommendations (already inserted as pending)
 971      assert.equal(
 972        result2.metrics.new_recommendations,
 973        0,
 974        'second call should not insert duplicate recommendations'
 975      );
 976      // But recommendations_found may still be > 0 (found via EXPLAIN etc.)
 977      assert.ok(
 978        result2.metrics.recommendations_found >= 0,
 979        'recommendations_found should be non-negative'
 980      );
 981    });
 982  });
 983  
 984  // ── Tests: diskCleanup handler ────────────────────────────────────────────────
 985  //
 986  // diskCleanup deletes coverage/ and other dirs. In the test sandbox these dirs
 987  // may exist with restricted permissions (EACCES). We handle both cases:
 988  // success (structured result) or permission-related throw.
 989  
 990  describe('HANDLERS.diskCleanup — runs or fails gracefully', () => {
 991    test('returns structured result or throws permission error', async () => {
 992      const { HANDLERS } = cronModule;
 993  
 994      let result;
 995      try {
 996        result = await HANDLERS.diskCleanup();
 997      } catch (err) {
 998        // Permission errors are acceptable in the test environment (coverage dirs owned by root etc.)
 999        if (err.code === 'EACCES' || err.code === 'ENOTEMPTY' || err.code === 'EPERM') {
1000          // Handler threw due to permissions — this is a known sandbox limitation.
1001          // The code path IS covered (we entered diskCleanup and ran until the error).
1002          assert.ok(err.message, 'should have an error message');
1003          return;
1004        }
1005        throw err; // re-throw unexpected errors
1006      }
1007  
1008      assert.ok(typeof result === 'object', 'should return object');
1009      assert.ok(typeof result.summary === 'string', 'should have summary');
1010      assert.ok(typeof result.metrics === 'object', 'should have metrics');
1011      assert.ok('actions' in result.metrics, 'metrics should have actions count');
1012      assert.ok('freed_mb' in result.metrics, 'metrics should have freed_mb');
1013    });
1014  });
1015  
1016  // ── Tests: technicalDebtReview handler ───────────────────────────────────────
1017  
describe('HANDLERS.technicalDebtReview — error path (TODO.md may not exist in test env)', () => {
  test('returns structured result (success or graceful error)', async () => {
    const { HANDLERS } = cronModule;
    const result = await HANDLERS.technicalDebtReview();

    assert.ok(typeof result === 'object', 'should return object');
    assert.ok(typeof result.summary === 'string', 'should have summary');
    assert.ok(typeof result.metrics === 'object', 'should have metrics');

    const succeeded = result.metrics.success !== 0;
    if (succeeded) {
      // Success path: TODO.md exists and was parsed
      assert.ok('incomplete' in result.metrics, 'metrics should have incomplete');
      assert.ok('completed' in result.metrics, 'metrics should have completed');
    } else {
      // Error path: TODO.md not found
      assert.ok(
        result.summary.includes('Could not'),
        `summary should mention failure: ${result.summary}`
      );
    }
  });
});
1040  
1041  // ── Tests: rotateLogs handler ─────────────────────────────────────────────────
1042  
describe('HANDLERS.rotateLogs — delegates to mock rotateLogs', () => {
  test('returns structured result with deleted, kept, freed_mb', async () => {
    const { HANDLERS } = cronModule;
    const result = await HANDLERS.rotateLogs();

    assert.ok(typeof result === 'object', 'should return object');
    assert.ok(typeof result.summary === 'string', 'should have summary');
    assert.ok(typeof result.metrics === 'object', 'should have metrics');
    // Each rotation metric must be present on the result.
    for (const key of ['deleted', 'kept', 'freed_mb']) {
      assert.ok(key in result.metrics, `metrics should have ${key}`);
    }
  });
});