// tests/cron/pipeline-status-monitor-supplement2.test.js
  1  /**
  2   * Pipeline Status Monitor — Supplement 2
  3   *
  4   * Targets functions with low/zero coverage by mocking db.js via pg-mock pattern:
  5   *   - checkCronHealth: no rows, healthy, overdue, never ran, unknown unit
  6   *   - retryFailingSites: dead-letter promotion, reset batch, no retryable sites
  7   *   - promoteStrandedScoredSites: ENABLE_VISION=false/true paths
  8   *   - rateLimitStatus: no file, expired entries, active entry, malformed JSON
  9   *   - siteProgression: rows returned, empty
 10   *   - agentTaskStatus: rows returned with per-agent + totals
 11   *   - stuckSites: normal count, error path
 12   *   - checkZombies: 0, low, critical
 13   *   - checkPipelineHung: active log, hung log triggering restart, proposals log selected
 14   *   - siteStatusDistribution: delta calculation
 15   *   - cronErrors: journalctl errors, false-positive filtering
 16   *   - runPipelineStatusMonitor return structure edge cases
 17   *
 18   * MUST be run with --experimental-test-module-mocks.
 19   */
 20  
 21  import { test, describe, mock, beforeEach } from 'node:test';
 22  import assert from 'node:assert/strict';
 23  import Database from 'better-sqlite3';
 24  import { createPgMock } from '../helpers/pg-mock.js';
 25  
 26  // ═══════════════════════════════════════════════════════════════
 27  // In-memory SQLite DB for pg-mock
 28  // ═══════════════════════════════════════════════════════════════
 29  
// Throwaway in-memory SQLite database that backs the pg-mock.
// The schema below covers exactly the tables the status monitor queries;
// defaults mirror production (status 'found', timestamps via datetime('now')).
const db = new Database(':memory:');

db.exec(`
  CREATE TABLE IF NOT EXISTS sites (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    status TEXT NOT NULL DEFAULT 'found',
    error_message TEXT,
    retry_count INTEGER DEFAULT 0,
    recapture_at TEXT,
    chronic_failure_count INTEGER DEFAULT 0,
    updated_at TEXT DEFAULT (datetime('now')),
    created_at TEXT DEFAULT (datetime('now'))
  );

  CREATE TABLE IF NOT EXISTS site_status (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    site_id INTEGER,
    status TEXT NOT NULL,
    created_at TEXT DEFAULT (datetime('now'))
  );

  CREATE TABLE IF NOT EXISTS pipeline_control (
    id INTEGER PRIMARY KEY,
    last_browser_loop_at TEXT,
    last_api_loop_at TEXT
  );

  CREATE TABLE IF NOT EXISTS cron_jobs (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    task_key TEXT UNIQUE NOT NULL,
    interval_value INTEGER NOT NULL,
    interval_unit TEXT NOT NULL,
    last_run_at TEXT,
    enabled INTEGER DEFAULT 1
  );

  CREATE TABLE IF NOT EXISTS agent_tasks (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    assigned_to TEXT NOT NULL,
    status TEXT NOT NULL DEFAULT 'pending',
    created_at TEXT DEFAULT (datetime('now'))
  );
`);
 73  
// ─── pg-mock wires db.js to our in-memory SQLite ─────────────────────────────
mock.module('../../src/utils/db.js', { namedExports: createPgMock(db) });

// ─── Other mocks ─────────────────────────────────────────────────────────────

// dotenv becomes a no-op so the module under test cannot load a real .env.
mock.module('dotenv', {
  defaultExport: { config: () => {} },
  namedExports: { config: () => {} },
});

// execSync default: "service active". Per-scenario behavior is installed
// via mockImplementation in beforeEach and individual tests.
const execSyncMock = mock.fn(() => 'active\n');
mock.module('child_process', {
  namedExports: { execSync: execSyncMock },
});
 88  
// Mutable state behind the fs mocks. All of it is reset in beforeEach.
let capturedAppendCalls = []; // every appendFileSync call: { path, content }
let capturedWriteCalls = [];  // every writeFileSync call: { path, content }
let mockFileContents = {};    // path (exact or substring key) -> readFileSync result
let mockExistsResults = {};   // path substring key -> existsSync result
let mockReaddirResults = {};  // path substring key -> readdirSync result
let mockStatResults = {};     // path substring key -> statSync result
let appendShouldFail = false; // when true, the FIRST append in a test throws ENOENT
let appendCallCount = 0;

// appendFileSync: records the call, unless configured to fail on the first one
// (the failing call is NOT recorded).
const appendFileSyncMock = mock.fn((path, content) => {
  appendCallCount++;
  if (appendShouldFail && appendCallCount === 1) {
    throw new Error('ENOENT: no such file or directory');
  }
  capturedAppendCalls.push({ path, content });
});

// readFileSync: exact-path match wins, then first substring match;
// otherwise throws like the real fs would for a missing file.
const readFileSyncMock = mock.fn((path, _encoding) => {
  if (mockFileContents[path] !== undefined) return mockFileContents[path];
  for (const [key, val] of Object.entries(mockFileContents)) {
    if (path.includes(key)) return val;
  }
  throw new Error(`ENOENT: no such file: ${path}`);
});

// writeFileSync: capture only; nothing touches the real filesystem.
const writeFileSyncMock = mock.fn((path, content) => {
  capturedWriteCalls.push({ path, content });
});

// existsSync: substring match against configured results; defaults to false.
const existsSyncMock = mock.fn(path => {
  for (const [key, val] of Object.entries(mockExistsResults)) {
    if (path.includes(key)) return val;
  }
  return false;
});

// readdirSync: substring match; defaults to an empty directory listing.
const readdirSyncMock = mock.fn(path => {
  for (const [key, val] of Object.entries(mockReaddirResults)) {
    if (path.includes(key)) return val;
  }
  return [];
});

// statSync: substring match; defaults to "modified just now".
const statSyncMock = mock.fn(path => {
  for (const [key, val] of Object.entries(mockStatResults)) {
    if (path.includes(key)) return val;
  }
  return { mtimeMs: Date.now() };
});

const mkdirSyncMock = mock.fn(() => {});

mock.module('fs', {
  namedExports: {
    appendFileSync: appendFileSyncMock,
    readFileSync: readFileSyncMock,
    writeFileSync: writeFileSyncMock,
    existsSync: existsSyncMock,
    readdirSync: readdirSyncMock,
    statSync: statSyncMock,
    mkdirSync: mkdirSyncMock,
  },
});
152  
// llm-usage-tracker — no variances by default
const checkBudgetVarianceMock = mock.fn(() => []);
mock.module('../../src/utils/llm-usage-tracker.js', {
  namedExports: {
    checkBudgetVariance: checkBudgetVarianceMock,
    logLLMUsage: mock.fn(() => {}),
    getLLMBudget: mock.fn(() => null),
  },
});

// Point the monitor at throwaway paths; fs is mocked so nothing is written.
process.env.LOGS_DIR = '/tmp/test-logs';
process.env.DATABASE_PATH = '/tmp/test-psm-supp2.db';

// Dynamic import AFTER every mock.module() call above, so the module under
// test binds to the mocked dependencies rather than the real ones.
const { runPipelineStatusMonitor } = await import('../../src/cron/pipeline-status-monitor.js');
167  
168  // ═══════════════════════════════════════════════════════════════
169  // Helpers
170  // ═══════════════════════════════════════════════════════════════
171  
/**
 * Empty every mock table so each test starts from a clean database.
 * Schema is preserved; only rows are deleted.
 */
function resetDb() {
  db.exec(`
    DELETE FROM sites;
    DELETE FROM site_status;
    DELETE FROM pipeline_control;
    DELETE FROM cron_jobs;
    DELETE FROM agent_tasks;
  `);
}
181  
beforeEach(() => {
  // Wipe call history on every mock so per-test assertions start at zero.
  const allMocks = [
    execSyncMock,
    appendFileSyncMock,
    readFileSyncMock,
    writeFileSyncMock,
    existsSyncMock,
    readdirSyncMock,
    statSyncMock,
    mkdirSyncMock,
    checkBudgetVarianceMock,
  ];
  for (const m of allMocks) m.mock.resetCalls();

  // Restore the shared mock state to its defaults.
  capturedAppendCalls = [];
  capturedWriteCalls = [];
  mockFileContents = {};
  mockExistsResults = {};
  mockReaddirResults = { logs: [] };
  mockStatResults = {};
  appendShouldFail = false;
  appendCallCount = 0;

  // No budget variances unless a test installs its own implementation.
  checkBudgetVarianceMock.mock.mockImplementation(() => []);

  // Baseline execSync: healthy service, empty journal, zero zombies.
  execSyncMock.mock.mockImplementation(cmd => {
    if (cmd.includes('is-active')) return 'active\n';
    if (cmd.includes('journalctl')) return '';
    if (cmd.includes('ps --no-header')) return '0';
    if (cmd.includes('show mmo-cron.service')) return 'ActiveState=active\nStateChangeTimestamp=\n';
    return '';
  });

  resetDb();
});
214  
215  // ═══════════════════════════════════════════════════════════════
216  // checkCronHealth
217  // ═══════════════════════════════════════════════════════════════
218  
describe('checkCronHealth — no enabled jobs', () => {
  test('reports "no enabled cron jobs found" when table is empty', async () => {
    // cron_jobs was emptied by resetDb in beforeEach, so the health check
    // has nothing scheduled to inspect.
    await runPipelineStatusMonitor();

    const [firstAppend] = capturedAppendCalls;
    const content = firstAppend ? firstAppend.content : '';
    assert.ok(
      content.includes('no enabled cron jobs found'),
      `expected "no enabled cron jobs found" in: ${content.slice(0, 400)}`
    );
  });
});
230  
describe('checkCronHealth — all healthy', () => {
  test('reports all N jobs on schedule when none are overdue', async () => {
    // Both jobs last ran 5 minutes ago — well inside their intervals.
    const recentTime = new Date(Date.now() - 5 * 60 * 1000).toISOString().slice(0, 19);
    db.prepare('INSERT INTO cron_jobs (task_key, interval_value, interval_unit, last_run_at, enabled) VALUES (?,?,?,?,?)').run('pipeline-status', 15, 'minutes', recentTime, 1);
    db.prepare('INSERT INTO cron_jobs (task_key, interval_value, interval_unit, last_run_at, enabled) VALUES (?,?,?,?,?)').run('poll-purchases', 5, 'minutes', recentTime, 1);

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(
      content.includes('All 2 cron jobs on schedule'),
      `expected "All 2 cron jobs on schedule" in: ${content.slice(0, 500)}`
    );
  });
});
246  
describe('checkCronHealth — overdue jobs', () => {
  test('reports OVERDUE when job has not run within 2x its interval', async () => {
    // 60 minutes since last run vs a 15-minute interval → past the 2x threshold.
    const sixtyMinAgo = new Date(Date.now() - 60 * 60 * 1000).toISOString().slice(0, 19);
    db.prepare('INSERT INTO cron_jobs (task_key, interval_value, interval_unit, last_run_at, enabled) VALUES (?,?,?,?,?)').run('pipeline-status', 15, 'minutes', sixtyMinAgo, 1);

    const result = await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(
      content.includes('OVERDUE: pipeline-status'),
      `expected "OVERDUE: pipeline-status" in: ${content.slice(0, 500)}`
    );
    assert.ok(
      result.actions.some(a => a.includes('cron_overdue_pipeline-status')),
      'overdue action should be recorded'
    );
  });

  test('reports NEVER RAN for job with null last_run_at', async () => {
    // last_run_at = NULL → distinct "NEVER RAN" wording, not "OVERDUE".
    db.prepare('INSERT INTO cron_jobs (task_key, interval_value, interval_unit, last_run_at, enabled) VALUES (?,?,?,?,?)').run('new-job', 10, 'minutes', null, 1);

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(
      content.includes('NEVER RAN: new-job'),
      `expected "NEVER RAN: new-job" in: ${content.slice(0, 500)}`
    );
  });

  test('skips rows with unknown interval_unit silently', async () => {
    // 'fortnights' is not a recognized unit; the row should be ignored
    // without crashing the run or corrupting the report section.
    const recentTime = new Date(Date.now() - 1 * 60 * 1000).toISOString().slice(0, 19);
    db.prepare('INSERT INTO cron_jobs (task_key, interval_value, interval_unit, last_run_at, enabled) VALUES (?,?,?,?,?)').run('weird-job', 5, 'fortnights', recentTime, 1);

    const result = await runPipelineStatusMonitor();
    assert.ok(result, 'should return a result');
    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(content.includes('Cron job health'), 'should have cron health section');
  });

  test('uses hours interval unit correctly', async () => {
    // hourly-job: 3h since last run vs a 1h interval → overdue.
    // daily-job: 30min since last run vs a 1-day interval → on schedule.
    const threeHoursAgo = new Date(Date.now() - 3 * 60 * 60 * 1000).toISOString().slice(0, 19);
    const thirtyMinAgo = new Date(Date.now() - 30 * 60 * 1000).toISOString().slice(0, 19);
    db.prepare('INSERT INTO cron_jobs (task_key, interval_value, interval_unit, last_run_at, enabled) VALUES (?,?,?,?,?)').run('hourly-job', 1, 'hours', threeHoursAgo, 1);
    db.prepare('INSERT INTO cron_jobs (task_key, interval_value, interval_unit, last_run_at, enabled) VALUES (?,?,?,?,?)').run('daily-job', 1, 'days', thirtyMinAgo, 1);

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(content.includes('OVERDUE: hourly-job'), 'hourly job should be overdue');
    assert.ok(!content.includes('OVERDUE: daily-job'), 'daily job should not be overdue');
  });

  test('reports 1/2 jobs overdue', async () => {
    // One healthy job and one overdue job → "1/2 jobs overdue" summary.
    const recentTime = new Date(Date.now() - 5 * 60 * 1000).toISOString().slice(0, 19);
    const overdueTime = new Date(Date.now() - 3 * 60 * 60 * 1000).toISOString().slice(0, 19);
    db.prepare('INSERT INTO cron_jobs (task_key, interval_value, interval_unit, last_run_at, enabled) VALUES (?,?,?,?,?)').run('healthy-job', 15, 'minutes', recentTime, 1);
    db.prepare('INSERT INTO cron_jobs (task_key, interval_value, interval_unit, last_run_at, enabled) VALUES (?,?,?,?,?)').run('slow-job', 30, 'minutes', overdueTime, 1);

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(content.includes('1/2 jobs overdue'), `expected "1/2 jobs overdue" in: ${content.slice(0, 500)}`);
  });
});
312  
313  // ═══════════════════════════════════════════════════════════════
314  // retryFailingSites
315  // ═══════════════════════════════════════════════════════════════
316  
describe('retryFailingSites', () => {
  test('reports dead-lettered sites when chronic failures promoted', async () => {
    // Insert sites that qualify for dead-lettering (chronic_failure_count >= 3)
    // and have retryable error messages
    for (let i = 0; i < 3; i++) {
      db.prepare(`
        INSERT INTO sites (status, error_message, retry_count, chronic_failure_count, updated_at)
        VALUES ('failing', 'spawn /ho error', 0, 3, datetime('now', '-2 hours'))
      `).run();
    }

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(
      content.includes('Dead-lettered') && content.includes('chronic failing sites'),
      `expected dead-letter message in: ${content.slice(0, 600)}`
    );
  });

  test('reports reset sites when retryable sites found with low chronic count', async () => {
    // Sites with chronic_failure_count < 3 should be reset to 'found'
    for (let i = 0; i < 5; i++) {
      db.prepare(`
        INSERT INTO sites (status, error_message, retry_count, chronic_failure_count, updated_at)
        VALUES ('failing', 'browserType.launch error', 0, 0, datetime('now', '-2 hours'))
      `).run();
    }

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(
      content.includes('Reset') && content.includes('retryable failing sites'),
      `expected reset message in: ${content.slice(0, 600)}`
    );

    // Verify sites were actually reset in DB (side effect, not just log text)
    const failingCount = db.prepare("SELECT COUNT(*) as cnt FROM sites WHERE status = 'failing'").get();
    assert.equal(failingCount.cnt, 0, 'all retryable sites should be reset');
    const foundCount = db.prepare("SELECT COUNT(*) as cnt FROM sites WHERE status = 'found'").get();
    assert.equal(foundCount.cnt, 5, 'sites should be reset to found');
  });

  test('no output when no failing sites exist', async () => {
    // No failing sites in DB — the retry step should stay silent.
    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(!content.includes('Dead-lettered'), 'no dead-letter message when nothing to do');
  });
});
369  
370  // ═══════════════════════════════════════════════════════════════
371  // promoteStrandedScoredSites
372  // ═══════════════════════════════════════════════════════════════
373  
describe('promoteStrandedScoredSites', () => {
  test('promotes stranded sites when ENABLE_VISION=false and sites exist', async () => {
    // Save/restore ENABLE_VISION so this test cannot leak env state.
    const origVision = process.env.ENABLE_VISION;
    process.env.ENABLE_VISION = 'false';

    // Insert prog_scored sites older than 1 hour
    for (let i = 0; i < 3; i++) {
      db.prepare(`
        INSERT INTO sites (status, updated_at)
        VALUES ('prog_scored', datetime('now', '-2 hours'))
      `).run();
    }

    try {
      await runPipelineStatusMonitor();
    } finally {
      if (origVision !== undefined) process.env.ENABLE_VISION = origVision;
      else delete process.env.ENABLE_VISION;
    }

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(
      content.includes('Promoted') && content.includes('prog_scored'),
      `expected promote message in: ${content.slice(0, 500)}`
    );

    // Verify sites were promoted (DB side effect, not just log text)
    const promoted = db.prepare("SELECT COUNT(*) as cnt FROM sites WHERE status = 'semantic_scored'").get();
    assert.equal(promoted.cnt, 3, 'all stranded sites should be promoted');
  });

  test('skips promote when ENABLE_VISION=true', async () => {
    const origVision = process.env.ENABLE_VISION;
    process.env.ENABLE_VISION = 'true';

    for (let i = 0; i < 2; i++) {
      db.prepare(`INSERT INTO sites (status, updated_at) VALUES ('prog_scored', datetime('now', '-2 hours'))`).run();
    }

    try {
      await runPipelineStatusMonitor();
    } finally {
      if (origVision !== undefined) process.env.ENABLE_VISION = origVision;
      else delete process.env.ENABLE_VISION;
    }

    const content = capturedAppendCalls[0]?.content || '';
    // prog_scored sites should NOT be promoted when ENABLE_VISION=true
    assert.ok(!content.includes('Promoted'), 'should not promote when ENABLE_VISION=true');

    // Sites should still be prog_scored
    const remaining = db.prepare("SELECT COUNT(*) as cnt FROM sites WHERE status = 'prog_scored'").get();
    assert.equal(remaining.cnt, 2, 'sites should remain prog_scored when ENABLE_VISION=true');
  });

  test('no output when no stranded sites exist', async () => {
    // Vision disabled but nothing stranded → no promote message at all.
    const origVision = process.env.ENABLE_VISION;
    process.env.ENABLE_VISION = 'false';

    try {
      await runPipelineStatusMonitor();
    } finally {
      if (origVision !== undefined) process.env.ENABLE_VISION = origVision;
      else delete process.env.ENABLE_VISION;
    }

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(!content.includes('Promoted'), 'no promote message when nothing changed');
  });
});
444  
445  // ═══════════════════════════════════════════════════════════════
446  // rateLimitStatus
447  // ═══════════════════════════════════════════════════════════════
448  
describe('rateLimitStatus', () => {
  test('reports "None active" when rate-limits.json does not exist', async () => {
    mockExistsResults['rate-limits.json'] = false;

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(
      content.includes('None active'),
      `expected "None active" in rate limits section: ${content.slice(0, 500)}`
    );
  });

  test('reports "None active" when all rate limits have expired', async () => {
    // resetAt in the past → the entry should be treated as expired.
    const expiredData = {
      openrouter: {
        resetAt: (Date.now() - 10 * 60 * 1000).toString(),
        stages: ['scoring'],
        reason: 'quota exceeded',
      },
    };
    mockExistsResults['rate-limits.json'] = true;
    mockFileContents['rate-limits.json'] = JSON.stringify(expiredData);

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(content.includes('None active'), 'expired rate limits should show None active');
  });

  test('reports active rate limit with stage and reason', async () => {
    // resetAt 30 minutes in the future → entry is still active.
    const activeData = {
      openrouter: {
        resetAt: (Date.now() + 30 * 60 * 1000).toString(),
        stages: ['scoring', 'proposals'],
        reason: 'daily quota exceeded',
      },
    };
    mockExistsResults['rate-limits.json'] = true;
    mockFileContents['rate-limits.json'] = JSON.stringify(activeData);

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(content.includes('openrouter'), `expected "openrouter" in: ${content.slice(0, 600)}`);
    assert.ok(content.includes('daily quota exceeded'), 'should show reason');
  });

  test('reports ERR when rate-limits.json is malformed JSON', async () => {
    mockExistsResults['rate-limits.json'] = true;
    mockFileContents['rate-limits.json'] = 'not valid json {{{';

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(
      content.includes('ERR reading rate-limits.json'),
      `expected error message in: ${content.slice(0, 500)}`
    );
  });
});
510  
511  // ═══════════════════════════════════════════════════════════════
512  // siteProgression
513  // ═══════════════════════════════════════════════════════════════
514  
describe('siteProgression', () => {
  test('reports site progression counts when sites updated recently', async () => {
    // Insert sites with updated_at in the last 15 minutes; prepare each
    // statement once and reuse it for every row.
    const insertScored = db.prepare(`INSERT INTO sites (status, updated_at) VALUES ('prog_scored', datetime('now', '-5 minutes'))`);
    const insertEnriched = db.prepare(`INSERT INTO sites (status, updated_at) VALUES ('enriched', datetime('now', '-3 minutes'))`);
    for (let n = 0; n < 42; n++) insertScored.run();
    for (let n = 0; n < 17; n++) insertEnriched.run();

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(content.includes('42 → prog_scored'), `expected "42 → prog_scored" in: ${content.slice(0, 600)}`);
    assert.ok(content.includes('17 → enriched'), 'expected enriched count');
  });

  test('reports (none) when no sites progressed recently', async () => {
    // Insert sites that were updated 20+ minutes ago (outside the 15-min window)
    db.prepare(`INSERT INTO sites (status, updated_at) VALUES ('found', datetime('now', '-20 minutes'))`).run();

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    const headerIdx = content.indexOf('Site progression (15min):');
    const afterProgression = content.slice(headerIdx, headerIdx + 100);
    assert.ok(afterProgression.includes('(none)'), `expected (none) after site progression header: ${afterProgression}`);
  });
});
544  
545  // ═══════════════════════════════════════════════════════════════
546  // agentTaskStatus
547  // ═══════════════════════════════════════════════════════════════
548  
describe('agentTaskStatus', () => {
  test('reports per-agent task counts', async () => {
    // 3 pending + 1 running + 1 complete for a single agent.
    for (let i = 0; i < 3; i++) {
      db.prepare(`INSERT INTO agent_tasks (assigned_to, status) VALUES ('MonitorAgent', 'pending')`).run();
    }
    db.prepare(`INSERT INTO agent_tasks (assigned_to, status) VALUES ('MonitorAgent', 'running')`).run();
    db.prepare(`INSERT INTO agent_tasks (assigned_to, status) VALUES ('MonitorAgent', 'complete')`).run();

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(content.includes('MonitorAgent|pending: 3'), `expected "MonitorAgent|pending: 3" in: ${content.slice(0, 600)}`);
    assert.ok(content.includes('MonitorAgent|running: 1'), 'expected running count');
    assert.ok(content.includes('--- totals ---'), 'should have totals section');
    assert.ok(content.includes('complete: 1'), 'expected complete in totals');
  });

  test('reports totals even when no pending/running tasks exist', async () => {
    // Only complete tasks
    for (let i = 0; i < 5; i++) {
      db.prepare(`INSERT INTO agent_tasks (assigned_to, status) VALUES ('SomeAgent', 'complete')`).run();
    }

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(content.includes('--- totals ---'), 'should have totals section');
    assert.ok(content.includes('complete: 5'), 'expected totals complete');
  });
});
579  
580  // ═══════════════════════════════════════════════════════════════
581  // stuckSites
582  // ═══════════════════════════════════════════════════════════════
583  
describe('stuckSites', () => {
  test('reports stuck count correctly', async () => {
    // Insert sites that haven't been updated in >4h and are not in terminal statuses
    const insertStale = db.prepare(`INSERT INTO sites (status, updated_at) VALUES ('found', datetime('now', '-5 hours'))`);
    let remaining = 7;
    while (remaining-- > 0) insertStale.run();

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(
      content.includes('Stuck sites (>4h no update): 7'),
      `expected stuck sites count in: ${content.slice(0, 500)}`
    );
  });

  test('reports 0 stuck sites when all sites are recently updated', async () => {
    // A site updated one hour ago is inside the 4h window → not stuck.
    db.prepare(`INSERT INTO sites (status, updated_at) VALUES ('found', datetime('now', '-1 hour'))`).run();

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(
      content.includes('Stuck sites (>4h no update): 0'),
      `expected 0 stuck sites in: ${content.slice(0, 500)}`
    );
  });
});
612  
613  // ═══════════════════════════════════════════════════════════════
614  // siteStatusDistribution with delta
615  // ═══════════════════════════════════════════════════════════════
616  
describe('siteStatusDistribution with delta', () => {
  test('shows positive delta when status count increased', async () => {
    // Snapshot said 100; DB now has 115 → delta should read "+15".
    mockExistsResults['pipeline-status-snapshot.json'] = true;
    mockFileContents['pipeline-status-snapshot.json'] = JSON.stringify({ found: 100 });

    for (let i = 0; i < 115; i++) {
      db.prepare(`INSERT INTO sites (status) VALUES ('found')`).run();
    }

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(content.includes('found: 115 (+15)'), `expected "+15" delta in: ${content.slice(0, 500)}`);
  });

  test('shows negative delta when status count decreased', async () => {
    // Snapshot said 50; DB now has 30 → delta should read "-20".
    mockExistsResults['pipeline-status-snapshot.json'] = true;
    mockFileContents['pipeline-status-snapshot.json'] = JSON.stringify({ prog_scored: 50 });

    for (let i = 0; i < 30; i++) {
      db.prepare(`INSERT INTO sites (status) VALUES ('prog_scored')`).run();
    }

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(content.includes('prog_scored: 30 (-20)'), `expected "-20" delta in: ${content.slice(0, 500)}`);
  });

  test('shows (=) when count unchanged', async () => {
    mockExistsResults['pipeline-status-snapshot.json'] = true;
    mockFileContents['pipeline-status-snapshot.json'] = JSON.stringify({ found: 80 });

    for (let i = 0; i < 80; i++) {
      db.prepare(`INSERT INTO sites (status) VALUES ('found')`).run();
    }

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(content.includes('found: 80 (=)'), `expected "(=)" in: ${content.slice(0, 500)}`);
  });

  test('shows +N for new status not in previous snapshot', async () => {
    // 'enriched' is absent from the snapshot, so its whole count is the delta.
    mockExistsResults['pipeline-status-snapshot.json'] = true;
    mockFileContents['pipeline-status-snapshot.json'] = JSON.stringify({ found: 10 });

    for (let i = 0; i < 10; i++) {
      db.prepare(`INSERT INTO sites (status) VALUES ('found')`).run();
    }
    for (let i = 0; i < 25; i++) {
      db.prepare(`INSERT INTO sites (status) VALUES ('enriched')`).run();
    }

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(content.includes('enriched: 25 (+25)'), `expected "+25" for new status in: ${content.slice(0, 500)}`);
  });
});
677  
678  // ═══════════════════════════════════════════════════════════════
679  // cronErrors — journalctl filtering
680  // ═══════════════════════════════════════════════════════════════
681  
describe('cronErrors — journalctl filtering', () => {
  test('reports error lines from journalctl', async () => {
    // One genuine [ERROR] line plus an info line; only the error should count.
    execSyncMock.mock.mockImplementation(cmd => {
      if (cmd.includes('journalctl')) {
        return [
          'Mar 28 10:00:00 host mmo-cron[123]: [ERROR] Failed to process site example.com',
          'Mar 28 10:01:00 host mmo-cron[123]: info: Processing...',
        ].join('\n');
      }
      if (cmd.includes('is-active')) return 'active\n';
      if (cmd.includes('ps --no-header')) return '0';
      if (cmd.includes('show mmo-cron.service')) return 'ActiveState=active\n';
      return '';
    });

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(
      content.includes('errors in last 15min'),
      `expected error count in cron section: ${content.slice(0, 600)}`
    );
  });

  test('filters out "Errors: 0" false positives', async () => {
    // "Errors: 0" mentions the word "Errors" but reports none — must be ignored.
    execSyncMock.mock.mockImplementation(cmd => {
      if (cmd.includes('journalctl')) {
        return 'Mar 28 10:00:00 host mmo-cron[123]: Errors: 0 — all good';
      }
      if (cmd.includes('is-active')) return 'active\n';
      if (cmd.includes('ps --no-header')) return '0';
      if (cmd.includes('show mmo-cron.service')) return 'ActiveState=active\n';
      return '';
    });

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(
      content.includes('No cron errors'),
      `expected "No cron errors" when only false positives: ${content.slice(0, 500)}`
    );
  });

  test('reports "journalctl unavailable" when execSync throws', async () => {
    // journalctl can fail on hosts without systemd access; the monitor should
    // degrade gracefully rather than crash.
    execSyncMock.mock.mockImplementation(cmd => {
      if (cmd.includes('journalctl')) throw new Error('D-Bus connection failure');
      if (cmd.includes('is-active')) return 'active\n';
      if (cmd.includes('ps --no-header')) return '0';
      if (cmd.includes('show mmo-cron.service')) return 'ActiveState=active\n';
      return '';
    });

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(
      content.includes('journalctl unavailable'),
      `expected "journalctl unavailable" in: ${content.slice(0, 500)}`
    );
  });

  test('filters out lines with test file paths', async () => {
    // Errors originating from test files should be excluded; the production
    // error on the next line should still be surfaced.
    execSyncMock.mock.mockImplementation(cmd => {
      if (cmd.includes('journalctl')) {
        return [
          'Mar 28 10:00:00 host mmo-cron[1]: [ERROR] tests/cron/pipeline.test.js Error in test',
          'Mar 28 10:00:01 host mmo-cron[1]: [ERROR] Real production Error: timeout',
        ].join('\n');
      }
      if (cmd.includes('is-active')) return 'active\n';
      if (cmd.includes('ps --no-header')) return '0';
      if (cmd.includes('show mmo-cron.service')) return 'ActiveState=active\n';
      return '';
    });

    await runPipelineStatusMonitor();

    const content = capturedAppendCalls[0]?.content || '';
    assert.ok(content.includes('Real production Error'), 'should show real error');
  });
});
764  
765  // ═══════════════════════════════════════════════════════════════
766  // checkZombies
767  // ═══════════════════════════════════════════════════════════════
768  
describe('checkZombies', () => {
  // Stub execSync so the `ps --no-header` zombie probe reports `count`
  // while every other probe reports a healthy service.
  const stubZombieCount = (count) => {
    execSyncMock.mock.mockImplementation((cmd) => {
      if (cmd.includes('ps --no-header')) return `${count}\n`;
      if (cmd.includes('is-active')) return 'active\n';
      if (cmd.includes('journalctl')) return '';
      if (cmd.includes('show mmo-cron.service')) return 'ActiveState=active\n';
      return '';
    });
  };

  test('reports no zombie processes when count=0', async () => {
    stubZombieCount(0);

    await runPipelineStatusMonitor();

    const report = capturedAppendCalls[0]?.content || '';
    assert.ok(report.includes('No zombie processes'), `expected "No zombie processes" in: ${report.slice(0, 500)}`);
  });

  test('reports low zombie count as monitoring warning', async () => {
    stubZombieCount(5);

    await runPipelineStatusMonitor();

    const report = capturedAppendCalls[0]?.content || '';
    assert.ok(
      report.includes('Zombie processes: 5') && report.includes('monitoring'),
      `expected low zombie warning in: ${report.slice(0, 500)}`
    );
  });

  test('reports critical zombie count when >= 50', async () => {
    stubZombieCount(52);

    await runPipelineStatusMonitor();

    const report = capturedAppendCalls[0]?.content || '';
    assert.ok(
      report.includes('Zombie processes: 52') && report.includes('container restart needed'),
      `expected critical zombie warning in: ${report.slice(0, 500)}`
    );
  });
});
821  
822  // ═══════════════════════════════════════════════════════════════
823  // checkPipelineHung
824  // ═══════════════════════════════════════════════════════════════
825  
describe('checkPipelineHung', () => {
  test('reports pipeline log active when recently modified', async () => {
    // Log touched 3 minutes ago — well under the hang threshold.
    mockReaddirResults['logs'] = ['pipeline-2026-03-28.log'];
    mockStatResults['pipeline-2026-03-28.log'] = { mtimeMs: Date.now() - 3 * 60 * 1000 };

    await runPipelineStatusMonitor();

    const report = capturedAppendCalls[0]?.content || '';
    assert.ok(
      report.includes('Pipeline log active'),
      `expected "Pipeline log active" in: ${report.slice(0, 500)}`
    );
  });

  test('reports pipeline not yet created when no log files exist', async () => {
    mockReaddirResults['logs'] = [];

    await runPipelineStatusMonitor();

    const report = capturedAppendCalls[0]?.content || '';
    assert.ok(
      report.includes('Pipeline log not yet created today'),
      `expected "not yet created" in: ${report.slice(0, 500)}`
    );
  });

  test('triggers restart when pipeline log silent >60min', async () => {
    // Log last touched 65 minutes ago — past the 60-minute hang threshold.
    mockReaddirResults['logs'] = ['pipeline-2026-03-28.log'];
    mockStatResults['pipeline-2026-03-28.log'] = { mtimeMs: Date.now() - 65 * 60 * 1000 };

    // Predicate → reply rules, evaluated in order (restart probe first).
    const replyRules = [
      [(c) => c.includes('restart') && c.includes('333method-pipeline'), 'ok\n'],
      [(c) => c.includes('is-active'), 'active\n'],
      [(c) => c.includes('journalctl'), ''],
      [(c) => c.includes('ps --no-header'), '0'],
      [(c) => c.includes('show mmo-cron.service'), 'ActiveState=active\n'],
    ];
    execSyncMock.mock.mockImplementation((cmd) => {
      const rule = replyRules.find(([matches]) => matches(cmd));
      return rule ? rule[1] : '';
    });

    const outcome = await runPipelineStatusMonitor();

    const report = capturedAppendCalls[0]?.content || '';
    assert.ok(
      report.includes('Pipeline log silent'),
      `expected "Pipeline log silent" in: ${report.slice(0, 500)}`
    );
    assert.ok(
      outcome.actions.some(a => a.startsWith('restarted_hung_pipeline')),
      'should record hung pipeline restart action'
    );
  });

  test('picks most-recently-modified log across all prefixes (proposals vs pipeline)', async () => {
    mockReaddirResults['logs'] = ['pipeline-2026-03-28.log', 'proposals-2026-03-28.log'];
    // proposals log is newer (2 min) than the pipeline log (30 min).
    statSyncMock.mock.mockImplementation(filePath => {
      if (filePath.includes('proposals')) return { mtimeMs: Date.now() - 2 * 60 * 1000 };
      if (filePath.includes('pipeline')) return { mtimeMs: Date.now() - 30 * 60 * 1000 };
      return { mtimeMs: Date.now() };
    });

    await runPipelineStatusMonitor();

    const report = capturedAppendCalls[0]?.content || '';
    assert.ok(
      report.includes('Pipeline log active'),
      `expected "Pipeline log active" using proposals log: ${report.slice(0, 500)}`
    );
    assert.ok(report.includes('proposals-2026-03-28.log'), 'should reference proposals log');
  });
});
896  
897  // ═══════════════════════════════════════════════════════════════
898  // runPipelineStatusMonitor — return structure edge cases
899  // ═══════════════════════════════════════════════════════════════
900  
describe('runPipelineStatusMonitor — return structure', () => {
  test('appendFileSync failure causes mkdirSync + retry', async () => {
    appendShouldFail = true;

    const outcome = await runPipelineStatusMonitor();

    // The monitor recovers from a failed append by creating the logs dir.
    assert.ok(mkdirSyncMock.mock.calls.length >= 1, 'should call mkdirSync on append failure');
    assert.ok(outcome, 'should still return a result');
  });

  test('reads existing snapshot when file exists', async () => {
    // Previous snapshot: found=50, prog_scored=20.
    mockExistsResults['pipeline-status-snapshot.json'] = true;
    mockFileContents['pipeline-status-snapshot.json'] =
      JSON.stringify({ found: 50, prog_scored: 20 });

    // Current DB state: found=55 (delta +5), prog_scored=20 (no change).
    let seeded = 0;
    while (seeded < 55) {
      db.prepare(`INSERT INTO sites (status) VALUES ('found')`).run();
      seeded += 1;
    }
    seeded = 0;
    while (seeded < 20) {
      db.prepare(`INSERT INTO sites (status) VALUES ('prog_scored')`).run();
      seeded += 1;
    }

    await runPipelineStatusMonitor();

    const report = capturedAppendCalls[0]?.content || '';
    assert.ok(report.includes('found: 55 (+5)'), `expected delta for found: ${report.slice(0, 500)}`);
    assert.ok(report.includes('prog_scored: 20 (=)'), `expected no change for prog_scored`);
  });

  test('handles corrupt snapshot gracefully (uses empty prevCounts)', async () => {
    mockExistsResults['pipeline-status-snapshot.json'] = true;
    mockFileContents['pipeline-status-snapshot.json'] = '{{not json}}';

    let seeded = 0;
    while (seeded < 10) {
      db.prepare(`INSERT INTO sites (status) VALUES ('found')`).run();
      seeded += 1;
    }

    const outcome = await runPipelineStatusMonitor();
    assert.ok(outcome, 'should return result despite corrupt snapshot');

    // With no usable previous counts, the full count shows as the delta.
    const report = capturedAppendCalls[0]?.content || '';
    assert.ok(report.includes('found: 10 (+10)'), `expected "+10" delta with corrupt snapshot`);
  });

  test('summary mentions actions when actions were taken', async () => {
    // Trigger a reset by adding retryable failing sites
    let seeded = 0;
    while (seeded < 2) {
      db.prepare(`INSERT INTO sites (status, error_message, chronic_failure_count, updated_at) VALUES ('failing', 'Timeout 30000ms exceeded', 0, datetime('now', '-1 hour'))`).run();
      seeded += 1;
    }

    const outcome = await runPipelineStatusMonitor();

    assert.ok(
      outcome.summary.includes('actions'),
      `summary should mention actions when sites were reset: ${outcome.summary}`
    );
  });
});