// __quarantined_tests__/agents/monitor-coverage5.test.js
  1  /**
  2   * Monitor Agent Coverage Boost - Part 5
  3   *
  4   * Targets uncovered lines after monitor-coverage4.test.js:
  5   * - Lines 1480-1508:  checkSLOCompliance — violations path (createTask, addReviewItem for critical)
  6   *                     and compliant path (log info + completeTask)
  7   * - Lines 1516-1621:  checkLoops — site retry loops, agent bounce loops, log domain frequency
  8   * - Lines 1702-1748:  checkBlockedTasks — individual triage (non-pattern blocked tasks)
  9   * - Lines 1778-1930:  checkRateLimitPatterns — full JSONL parsing, signal detection
 10   *                     (wait_too_short, false_positive, high_frequency), active limits,
 11   *                     findings > 0 path, no-file path
 12   */
 13  
// Point the agents at throwaway DB/log locations for this suite.
// NOTE(review): static ESM imports are hoisted, so every imported module
// evaluates BEFORE these assignments run — a module that reads these env vars
// at import time will not see them. This suite appears to rely on
// resetMonitorDb(sharedDb) in before() to inject the test DB instead; confirm
// the agent modules do not cache DATABASE_PATH/LOGS_DIR at load time.
process.env.DATABASE_PATH = '/tmp/test-monitor-cov5.db';
process.env.AGENT_IMMEDIATE_INVOCATION = 'false';
process.env.LOGS_DIR = '/tmp/test-logs-monitor-cov5/';

import { test, describe, before, after, beforeEach } from 'node:test';
import assert from 'node:assert/strict';
import Database from 'better-sqlite3';
import { unlinkSync, mkdirSync, rmSync, writeFileSync } from 'fs';

// Must stay in sync with the env vars above.
const TEST_DB_PATH = '/tmp/test-monitor-cov5.db';
const TEST_LOG_DIR = '/tmp/test-logs-monitor-cov5';

// Clean up leftover DB files from a previous crashed run — the main file plus
// SQLite's WAL and shared-memory sidecar files.
for (const ext of ['', '-wal', '-shm']) {
  try {
    unlinkSync(TEST_DB_PATH + ext);
  } catch {
    /* ignore */
  }
}
 34  
// Standalone copy of the schema this suite needs: agent task/log/state/message
// tables, human review queue, settings, sites, pipeline metrics, outcomes,
// structured logs, site_status and cron_locks — with the six known agents
// pre-seeded into agent_state as 'idle'. Keep column shapes in sync with the
// production tables the monitor agent queries.
const DB_SCHEMA = `
  CREATE TABLE IF NOT EXISTS agent_tasks (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    task_type TEXT NOT NULL,
    assigned_to TEXT NOT NULL,
    created_by TEXT,
    status TEXT DEFAULT 'pending',
    priority INTEGER DEFAULT 5,
    context_json TEXT,
    result_json TEXT,
    parent_task_id INTEGER,
    error_message TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    started_at DATETIME,
    completed_at DATETIME,
    retry_count INTEGER DEFAULT 0
  );
  CREATE TABLE IF NOT EXISTS agent_logs (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    task_id INTEGER,
    agent_name TEXT NOT NULL,
    log_level TEXT,
    message TEXT NOT NULL,
    data_json TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  );
  CREATE TABLE IF NOT EXISTS agent_state (
    agent_name TEXT PRIMARY KEY,
    last_active DATETIME DEFAULT CURRENT_TIMESTAMP,
    current_task_id INTEGER,
    status TEXT DEFAULT 'idle',
    metrics_json TEXT
  );
  CREATE TABLE IF NOT EXISTS agent_messages (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    task_id INTEGER,
    from_agent TEXT NOT NULL,
    to_agent TEXT NOT NULL,
    message_type TEXT,
    content TEXT NOT NULL,
    metadata_json TEXT,
    context_json TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    read_at DATETIME
  );
  CREATE TABLE IF NOT EXISTS human_review_queue (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    file TEXT NOT NULL,
    reason TEXT NOT NULL,
    type TEXT NOT NULL,
    priority TEXT NOT NULL,
    metadata TEXT,
    status TEXT DEFAULT 'pending',
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  );
  CREATE TABLE IF NOT EXISTS settings (
    key TEXT PRIMARY KEY,
    value TEXT NOT NULL,
    description TEXT,
    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP
  );
  CREATE TABLE IF NOT EXISTS sites (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    domain TEXT,
    landing_page_url TEXT,
    status TEXT DEFAULT 'found',
    error_message TEXT,
    score REAL,
    grade TEXT,
    recapture_count INTEGER DEFAULT 0,
    updated_at DATETIME DEFAULT CURRENT_TIMESTAMP,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  );
  CREATE TABLE IF NOT EXISTS pipeline_metrics (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    stage_name TEXT NOT NULL,
    sites_processed INTEGER DEFAULT 0,
    sites_succeeded INTEGER DEFAULT 0,
    sites_failed INTEGER DEFAULT 0,
    duration_ms INTEGER NOT NULL,
    started_at DATETIME NOT NULL,
    finished_at DATETIME NOT NULL,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  );
  CREATE TABLE IF NOT EXISTS agent_outcomes (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    task_id INTEGER,
    agent_name TEXT NOT NULL,
    outcome TEXT NOT NULL,
    context_json TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  );
  CREATE TABLE IF NOT EXISTS structured_logs (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    agent_name TEXT,
    task_id INTEGER,
    level TEXT,
    message TEXT,
    data_json TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  );
  CREATE TABLE IF NOT EXISTS site_status (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    site_id INTEGER,
    status TEXT,
    created_at DATETIME DEFAULT CURRENT_TIMESTAMP
  );
  CREATE TABLE IF NOT EXISTS cron_locks (
    lock_key TEXT PRIMARY KEY,
    acquired_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    updated_at DATETIME NOT NULL DEFAULT CURRENT_TIMESTAMP,
    description TEXT
  );
  INSERT OR IGNORE INTO agent_state (agent_name, status) VALUES ('monitor', 'idle');
  INSERT OR IGNORE INTO agent_state (agent_name, status) VALUES ('triage', 'idle');
  INSERT OR IGNORE INTO agent_state (agent_name, status) VALUES ('developer', 'idle');
  INSERT OR IGNORE INTO agent_state (agent_name, status) VALUES ('qa', 'idle');
  INSERT OR IGNORE INTO agent_state (agent_name, status) VALUES ('security', 'idle');
  INSERT OR IGNORE INTO agent_state (agent_name, status) VALUES ('architect', 'idle');
`;
155  
// Single shared on-disk handle for the whole suite; injected into the monitor
// module in before(). WAL mode plus a generous busy_timeout keeps concurrent
// statements from throwing SQLITE_BUSY during async agent work.
const sharedDb = new Database(TEST_DB_PATH);
sharedDb.pragma('journal_mode = WAL');
sharedDb.pragma('busy_timeout = 10000');
sharedDb.exec(DB_SCHEMA);

// ATTACH in-memory databases as ops and tel so queries like ops.settings, tel.agent_tasks resolve
// — the production code splits operational settings and telemetry into
// separately attached databases, so the agent's qualified table names must
// resolve here too. Attached DBs vanish when sharedDb is closed.
sharedDb.exec(`
  ATTACH ':memory:' AS ops;
  ATTACH ':memory:' AS tel;
  CREATE TABLE IF NOT EXISTS ops.settings (key TEXT PRIMARY KEY, value TEXT NOT NULL, description TEXT, updated_at DATETIME DEFAULT CURRENT_TIMESTAMP);
  CREATE TABLE IF NOT EXISTS tel.agent_tasks (id INTEGER PRIMARY KEY AUTOINCREMENT, task_type TEXT NOT NULL, assigned_to TEXT NOT NULL, created_by TEXT, status TEXT DEFAULT 'pending', priority INTEGER DEFAULT 5, context_json TEXT, result_json TEXT, parent_task_id INTEGER, error_message TEXT, created_at DATETIME DEFAULT CURRENT_TIMESTAMP, started_at DATETIME, completed_at DATETIME, retry_count INTEGER DEFAULT 0);
  CREATE TABLE IF NOT EXISTS tel.agent_logs (id INTEGER PRIMARY KEY AUTOINCREMENT, task_id INTEGER, agent_name TEXT NOT NULL, log_level TEXT, message TEXT NOT NULL, data_json TEXT, created_at DATETIME DEFAULT CURRENT_TIMESTAMP);
  CREATE TABLE IF NOT EXISTS tel.agent_state (agent_name TEXT PRIMARY KEY, last_active DATETIME DEFAULT CURRENT_TIMESTAMP, current_task_id INTEGER, status TEXT DEFAULT 'idle', metrics_json TEXT);
  CREATE TABLE IF NOT EXISTS tel.agent_messages (id INTEGER PRIMARY KEY AUTOINCREMENT, task_id INTEGER, from_agent TEXT NOT NULL, to_agent TEXT NOT NULL, message_type TEXT, content TEXT NOT NULL, metadata_json TEXT, context_json TEXT, created_at DATETIME DEFAULT CURRENT_TIMESTAMP, read_at DATETIME);
  CREATE TABLE IF NOT EXISTS tel.agent_outcomes (id INTEGER PRIMARY KEY AUTOINCREMENT, task_id INTEGER, agent_name TEXT NOT NULL, task_type TEXT NOT NULL, outcome TEXT NOT NULL, context_json TEXT, result_json TEXT, duration_ms INTEGER, created_at DATETIME DEFAULT CURRENT_TIMESTAMP);
  CREATE TABLE IF NOT EXISTS tel.pipeline_metrics (id INTEGER PRIMARY KEY AUTOINCREMENT, stage_name TEXT NOT NULL, sites_processed INTEGER DEFAULT 0, sites_succeeded INTEGER DEFAULT 0, sites_failed INTEGER DEFAULT 0, duration_ms INTEGER NOT NULL, started_at DATETIME NOT NULL, finished_at DATETIME NOT NULL, created_at DATETIME DEFAULT CURRENT_TIMESTAMP);
  CREATE TABLE IF NOT EXISTS tel.structured_logs (id INTEGER PRIMARY KEY AUTOINCREMENT, agent_name TEXT, task_id INTEGER, level TEXT, message TEXT, data_json TEXT, created_at DATETIME DEFAULT CURRENT_TIMESTAMP);
  INSERT OR IGNORE INTO tel.agent_state (agent_name, status) VALUES ('monitor', 'idle');
  INSERT OR IGNORE INTO tel.agent_state (agent_name, status) VALUES ('triage', 'idle');
  INSERT OR IGNORE INTO tel.agent_state (agent_name, status) VALUES ('developer', 'idle');
  INSERT OR IGNORE INTO tel.agent_state (agent_name, status) VALUES ('qa', 'idle');
  INSERT OR IGNORE INTO tel.agent_state (agent_name, status) VALUES ('security', 'idle');
  INSERT OR IGNORE INTO tel.agent_state (agent_name, status) VALUES ('architect', 'idle');
`);
180  
// Project modules under test. Each exposes a resetDb() hook; the monitor's is
// used below to inject the shared better-sqlite3 handle, and all three are
// called again in after() to detach state.
import { resetDb as resetBaseDb } from '../../src/agents/base-agent.js';
import { resetDb as resetSLODb } from '../../src/agents/utils/slo-tracker.js';
import { MonitorAgent, resetDb as resetMonitorDb } from '../../src/agents/monitor.js';

// One MonitorAgent instance shared by every test in this file.
let agent;

before(async () => {
  mkdirSync(TEST_LOG_DIR, { recursive: true });
  // Inject the test DB *before* constructing/initializing the agent.
  resetMonitorDb(sharedDb);
  agent = new MonitorAgent();
  await agent.initialize();
});
193  
after(() => {
  // Detach the injected DB handle from every module, then tear down on-disk
  // state. Each cleanup step is best-effort: it may legitimately fail
  // (already closed / already deleted) and must not mask real test results.
  resetMonitorDb(null);
  resetBaseDb();
  resetSLODb();

  const attempt = (fn) => {
    try {
      fn();
    } catch {
      /* ignore */
    }
  };

  attempt(() => sharedDb.close());
  for (const ext of ['', '-wal', '-shm']) {
    attempt(() => unlinkSync(TEST_DB_PATH + ext));
  }
  attempt(() => rmSync(TEST_LOG_DIR, { recursive: true, force: true }));
});
216  
/**
 * Reset all mutable state between tests: wipe the task/log/message/review/
 * site/metric tables, flip every agent row back to 'idle', and re-seed the
 * six known agents in case a test deleted their agent_state rows.
 */
function clearTables() {
  sharedDb.exec(`
    DELETE FROM agent_tasks;
    DELETE FROM agent_logs;
    DELETE FROM agent_messages;
    DELETE FROM human_review_queue;
    DELETE FROM settings;
    DELETE FROM sites;
    DELETE FROM pipeline_metrics;
    DELETE FROM agent_outcomes;
    DELETE FROM site_status;
    UPDATE agent_state SET status = 'idle', current_task_id = NULL, metrics_json = NULL;
    INSERT OR IGNORE INTO agent_state (agent_name, status) VALUES ('monitor', 'idle');
    INSERT OR IGNORE INTO agent_state (agent_name, status) VALUES ('triage', 'idle');
    INSERT OR IGNORE INTO agent_state (agent_name, status) VALUES ('developer', 'idle');
    INSERT OR IGNORE INTO agent_state (agent_name, status) VALUES ('qa', 'idle');
    INSERT OR IGNORE INTO agent_state (agent_name, status) VALUES ('security', 'idle');
    INSERT OR IGNORE INTO agent_state (agent_name, status) VALUES ('architect', 'idle');
  `);
}
237  
/**
 * Insert a synthetic agent_tasks row assigned to the monitor agent and
 * return the row as persisted.
 *
 * @param {string} taskType - value for task_type
 * @param {object} [context] - serialized into context_json
 * @param {string} [status] - initial status (defaults to 'running')
 * @param {object} [opts] - optional created_at / error_message overrides
 * @returns {object} the freshly inserted agent_tasks row
 */
function insertTask(taskType, context = {}, status = 'running', opts = {}) {
  const createdAt = opts.created_at || null;
  const errorMessage = opts.error_message || null;
  const { lastInsertRowid } = sharedDb
    .prepare(
      `INSERT INTO agent_tasks (task_type, assigned_to, priority, context_json, status, created_at, error_message)
       VALUES (?, 'monitor', 5, ?, ?, COALESCE(?, CURRENT_TIMESTAMP), ?)`
    )
    .run(taskType, JSON.stringify(context), status, createdAt, errorMessage);
  return sharedDb.prepare('SELECT * FROM agent_tasks WHERE id = ?').get(lastInsertRowid);
}
253  
// Every test starts from a clean slate; clearTables ignores the hook context.
beforeEach(clearTables);
257  
258  // ── checkSLOCompliance — compliant path (no violations) ──────────────────────
259  
describe('MonitorAgent - checkSLOCompliance compliant path (lines 1496-1507)', () => {
  test('completes with zero violations when no pipeline_metrics exist', async () => {
    // With pipeline_metrics empty, every SLO is trivially met.
    const sloTask = insertTask('check_slo_compliance', {}, 'running');

    await assert.doesNotReject(
      agent.processTask(sloTask),
      'checkSLOCompliance should not throw with empty metrics'
    );

    const row = sharedDb
      .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
      .get(sloTask.id);
    assert.equal(row?.status, 'completed');
    const { violations } = JSON.parse(row.result_json || '{}');
    assert.equal(violations, 0);
  });
});
277  
278  // ── checkSLOCompliance — violations path ─────────────────────────────────────
279  
describe('MonitorAgent - checkSLOCompliance violations path (lines 1442-1494)', () => {
  test('creates design_optimization tasks when SLO violations exist', async () => {
    // Seed 20 identical slow "assets" runs (45 min each, started an hour ago)
    // so the stage's p95 duration exceeds its SLO target.
    const insertMetric = sharedDb.prepare(
      `INSERT INTO pipeline_metrics
       (stage_name, sites_processed, sites_succeeded, sites_failed, duration_ms, started_at, finished_at)
       VALUES (?, ?, ?, ?, ?, ?, ?)`
    );
    const baseMs = Date.now() - 60 * 60 * 1000;
    const startIso = new Date(baseMs).toISOString();
    const endIso = new Date(baseMs + 45 * 60 * 1000).toISOString();
    for (let i = 0; i < 20; i++) {
      insertMetric.run('assets', 1, 1, 0, 45 * 60 * 1000, startIso, endIso);
    }

    const sloTask = insertTask('check_slo_compliance', {}, 'running');

    await assert.doesNotReject(
      agent.processTask(sloTask),
      'checkSLOCompliance with violations should not throw'
    );

    const row = sharedDb
      .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
      .get(sloTask.id);
    assert.equal(row?.status, 'completed');

    // If violations were found, design_optimization tasks should have been
    // created; either way the summary counters must be numeric.
    const result = JSON.parse(row.result_json || '{}');
    assert.ok(typeof result.violations === 'number');
    assert.ok(typeof result.total_slos === 'number');
  });
});
315  
316  // ── checkLoops — no loops (empty DB) ─────────────────────────────────────────
317  
describe('MonitorAgent - checkLoops no-loops path (lines 1516-1621)', () => {
  test('completes with zero loops when DB is empty', async () => {
    const loopTask = insertTask('check_loops', {}, 'running');

    await assert.doesNotReject(
      agent.processTask(loopTask),
      'checkLoops should not throw with empty DB'
    );

    const row = sharedDb
      .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
      .get(loopTask.id);
    assert.equal(row?.status, 'completed');

    // All loop counters must be exactly zero on an empty database.
    const result = JSON.parse(row.result_json || '{}');
    for (const counter of ['total_loops', 'site_retry_loops', 'agent_bounce_loops']) {
      assert.equal(result[counter], 0);
    }
  });
});
337  
338  // ── checkLoops — site retry loops (recapture_count > 3) ──────────────────────
339  
describe('MonitorAgent - checkLoops site retry loops (lines 1531-1547)', () => {
  test('detects sites with recapture_count > 3', async () => {
    // Two sites above the recapture threshold → two site-retry loops.
    sharedDb
      .prepare(
        `INSERT INTO sites (domain, status, recapture_count)
         VALUES ('loopy-site.com', 'found', 5), ('another-loop.com', 'found', 4)`
      )
      .run();

    const loopTask = insertTask('check_loops', {}, 'running');

    await assert.doesNotReject(
      agent.processTask(loopTask),
      'checkLoops with site loops should not throw'
    );

    const row = sharedDb
      .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
      .get(loopTask.id);
    assert.equal(row?.status, 'completed');
    const result = JSON.parse(row.result_json || '{}');
    assert.equal(result.site_retry_loops, 2);
    assert.ok(result.total_loops >= 2);
  });
});
366  
367  // ── checkLoops — agent bounce loops ──────────────────────────────────────────
368  
describe('MonitorAgent - checkLoops agent bounce loops (lines 1550-1577)', () => {
  test('detects parent tasks with >3 children in 24h', async () => {
    // One completed parent task...
    const { lastInsertRowid: parentId } = sharedDb
      .prepare(
        `INSERT INTO agent_tasks (task_type, assigned_to, status, context_json)
         VALUES ('check_agent_health', 'monitor', 'completed', '{}')`
      )
      .run();

    // ...with four blocked children created "now", i.e. inside the 24h window.
    const insertChild = sharedDb.prepare(
      `INSERT INTO agent_tasks (task_type, assigned_to, status, context_json, parent_task_id)
       VALUES ('fix_bug', 'developer', 'blocked', '{}', ?)`
    );
    for (let i = 0; i < 4; i++) {
      insertChild.run(parentId);
    }

    const loopTask = insertTask('check_loops', {}, 'running');

    await assert.doesNotReject(
      agent.processTask(loopTask),
      'checkLoops with bounce loops should not throw'
    );

    const row = sharedDb
      .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
      .get(loopTask.id);
    assert.equal(row?.status, 'completed');
    const result = JSON.parse(row.result_json || '{}');
    assert.ok(result.agent_bounce_loops >= 1, 'should detect at least 1 bounce loop');
    assert.ok(result.total_loops >= 1);
  });
});
406  
407  // ── checkLoops — log domain frequency via file ────────────────────────────────
408  
describe('MonitorAgent - checkLoops log domain frequency (lines 1579-1621)', () => {
  test('detects domain appearing >10 times in pipeline log', async () => {
    const today = new Date().toISOString().slice(0, 10);
    const logPath = `${TEST_LOG_DIR}/pipeline-${today}.log`;

    // Write a log file with one domain repeated 12 times (over the >10
    // threshold) plus one normal control line.
    const lines = [];
    for (let i = 0; i < 12; i++) {
      lines.push(`[INFO] Processing domain=spammy-domain.com stage=assets attempt=${i}`);
    }
    lines.push('[INFO] Processing domain=normal-site.com stage=assets attempt=1');
    writeFileSync(logPath, lines.join('\n'));

    // FIX: cleanup moved into a finally block — previously the unlink ran
    // after the assertions, so a failing assertion leaked the log file and
    // could poison subsequent checkLoops tests that share LOGS_DIR.
    try {
      const task = insertTask('check_loops', {}, 'running');

      await assert.doesNotReject(
        agent.processTask(task),
        'checkLoops with log domain loops should not throw'
      );

      const completed = sharedDb
        .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
        .get(task.id);
      assert.equal(completed?.status, 'completed');
      const result = JSON.parse(completed.result_json || '{}');
      assert.ok(result.log_domain_loops >= 1, 'should detect the spammy domain in log');
    } finally {
      try {
        unlinkSync(logPath);
      } catch {
        /* ignore */
      }
    }
  });
});
444  
445  // ── checkBlockedTasks — individual triage for non-pattern blocked tasks ───────
446  
describe('MonitorAgent - checkBlockedTasks individual triage (lines 1702-1748)', () => {
  // SQLite-style 'YYYY-MM-DD HH:MM:SS' timestamp `hours` hours in the past.
  // FIX: the original duplicated this expression three times under the
  // misleading name `twoHoursAgo` while actually computing 3 hours ago.
  const sqlTimestampHoursAgo = (hours) =>
    new Date(Date.now() - hours * 60 * 60 * 1000)
      .toISOString()
      .replace('T', ' ')
      .slice(0, 19);

  test('creates classify_error task for individual blocked task without error pattern', async () => {
    // Insert 1 blocked task with a unique error (won't form a pattern group of >3)
    sharedDb
      .prepare(
        `INSERT INTO agent_tasks (task_type, assigned_to, status, error_message, created_at)
         VALUES ('scan_logs', 'monitor', 'blocked', 'Unique transient error: connection refused on port 5432', ?)`
      )
      .run(sqlTimestampHoursAgo(3));

    // Consistency: use the shared insertTask helper instead of a raw INSERT —
    // it produces an identical row for the monitor's own task.
    const task = insertTask('check_blocked_tasks', {}, 'running');

    await agent.processTask(task);

    const completed = sharedDb
      .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
      .get(task.id);
    assert.equal(completed?.status, 'completed');
    const result = JSON.parse(completed.result_json || '{}');
    assert.equal(result.total_blocked, 1);
    assert.ok(result.triage_created >= 1, 'should create a classify_error triage task');

    // Verify the classify_error task was created
    const triageTask = sharedDb
      .prepare(
        `SELECT * FROM agent_tasks
         WHERE task_type = 'classify_error' AND assigned_to = 'triage'`
      )
      .get();
    assert.ok(triageTask, 'classify_error task should be created');
    const ctx = JSON.parse(triageTask.context_json || '{}');
    assert.equal(ctx.error_type, 'blocked_task');
    assert.ok(ctx.hours_blocked >= 0);
  });

  test('does not duplicate triage when classify_error already exists for blocked task', async () => {
    // Insert blocked task
    const blockedR = sharedDb
      .prepare(
        `INSERT INTO agent_tasks (task_type, assigned_to, status, error_message, created_at)
         VALUES ('detect_anomaly', 'monitor', 'blocked', 'Another unique error: timeout reading socket', ?)`
      )
      .run(sqlTimestampHoursAgo(3));
    const blockedId = blockedR.lastInsertRowid;

    // Pre-insert a triage_error child task for this blocked task
    sharedDb
      .prepare(
        `INSERT INTO agent_tasks (task_type, assigned_to, status, context_json, parent_task_id)
         VALUES ('triage_error', 'triage', 'pending', '{}', ?)`
      )
      .run(blockedId);

    const task = insertTask('check_blocked_tasks', {}, 'running');

    await agent.processTask(task);

    const completed = sharedDb
      .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
      .get(task.id);
    assert.equal(completed?.status, 'completed');
    const result = JSON.parse(completed.result_json || '{}');
    // Should not create a new triage task since one already exists
    assert.equal(result.triage_created, 0, 'should not create duplicate triage task');
  });

  test('handles blocked task with no error message (null error)', async () => {
    // Blocked task with NULL error_message — errPrefix will be '' so it's excluded from pattern groups
    sharedDb
      .prepare(
        `INSERT INTO agent_tasks (task_type, assigned_to, status, error_message, created_at)
         VALUES ('check_pipeline_health', 'monitor', 'blocked', NULL, ?)`
      )
      .run(sqlTimestampHoursAgo(3));

    const task = insertTask('check_blocked_tasks', {}, 'running');

    await assert.doesNotReject(
      agent.processTask(task),
      'checkBlockedTasks should handle null error_message'
    );

    const completed = sharedDb
      .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
      .get(task.id);
    assert.equal(completed?.status, 'completed');
    const result = JSON.parse(completed.result_json || '{}');
    assert.equal(result.total_blocked, 1);
    // Task with no error message still gets individual triage
    assert.ok(result.triage_created >= 1, 'null-error blocked task should get triage');
  });
});
572  
573  // ── checkRateLimitPatterns — no JSONL file exists ────────────────────────────
574  
describe('MonitorAgent - checkRateLimitPatterns no file (lines 1785-1800)', () => {
  test('completes gracefully when rate-limit-events.jsonl does not exist', async () => {
    const rlTask = insertTask('check_rate_limits', {}, 'running');

    await assert.doesNotReject(
      agent.processTask(rlTask),
      'checkRateLimitPatterns should not throw when JSONL file is missing'
    );

    const row = sharedDb
      .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
      .get(rlTask.id);
    assert.equal(row?.status, 'completed');
    const { events_analysed, findings } = JSON.parse(row.result_json || '{}');
    assert.equal(events_analysed, 0);
    assert.equal(findings, 0);
  });
});
593  
594  // ── checkRateLimitPatterns — wait_too_short signal ───────────────────────────
595  
describe('MonitorAgent - checkRateLimitPatterns wait_too_short signal (lines 1805-1833)', () => {
  test('detects wait_too_short when rate limit re-triggers within 2h of clear', async () => {
    const now = Date.now();
    // NOTE(review): this path is relative to process.cwd(), i.e. the real
    // ./logs directory — confirm local runs cannot clobber production
    // rate-limit state.
    const eventsFilePath = `${process.cwd()}/logs/rate-limit-events.jsonl`;

    // Ensure logs dir exists
    try {
      mkdirSync(`${process.cwd()}/logs`, { recursive: true });
    } catch {
      /* ignore */
    }

    // Write events: clear at T-6d, then set 30 min later (< 2h gap)
    const clearTime = new Date(now - 6 * 24 * 60 * 60 * 1000).toISOString();
    const setTimeSoon = new Date(now - 6 * 24 * 60 * 60 * 1000 + 30 * 60 * 1000).toISOString();

    const events = [
      JSON.stringify({ api: 'zenrows', type: 'clear', timestamp: clearTime }),
      JSON.stringify({
        api: 'zenrows',
        type: 'set',
        timestamp: setTimeSoon,
        limitType: 'hourly',
        reason: 'quota',
      }),
    ];
    writeFileSync(eventsFilePath, `${events.join('\n')}\n`);

    // FIX: assertions run inside try/finally so the JSONL fixture is always
    // removed — previously a failing assertion leaked the file and could
    // poison subsequent rate-limit tests (and the real logs dir).
    try {
      const task = insertTask('check_rate_limits', {}, 'running');

      await assert.doesNotReject(
        agent.processTask(task),
        'checkRateLimitPatterns wait_too_short should not throw'
      );

      const completed = sharedDb
        .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
        .get(task.id);
      assert.equal(completed?.status, 'completed');
      const result = JSON.parse(completed.result_json || '{}');
      assert.ok(result.events_analysed >= 2);
      assert.ok(result.findings >= 1, 'should detect wait_too_short signal');
      assert.ok(
        result.signals.some(s => s.signal === 'wait_too_short'),
        'signals should include wait_too_short'
      );
    } finally {
      try {
        unlinkSync(eventsFilePath);
      } catch {
        /* ignore */
      }
    }
  });
});
651  
652  // ── checkRateLimitPatterns — false_positive signal ───────────────────────────
653  
describe('MonitorAgent - checkRateLimitPatterns false_positive signal (lines 1835-1851)', () => {
  test('detects false_positive when clear arrives >50% early', async () => {
    const now = Date.now();
    const eventsFilePath = `${process.cwd()}/logs/rate-limit-events.jsonl`;

    try {
      mkdirSync(`${process.cwd()}/logs`, { recursive: true });
    } catch {
      /* ignore */
    }

    // clear event with earlyByMs = 70% of scheduled wait
    const scheduledMs = 60 * 60 * 1000; // 60 min scheduled
    const earlyByMs = Math.round(scheduledMs * 0.7); // 70% early
    const actualWaitMs = scheduledMs - earlyByMs;

    const clearTime = new Date(now - 3 * 24 * 60 * 60 * 1000).toISOString();
    const events = [
      JSON.stringify({
        api: 'openrouter',
        type: 'clear',
        timestamp: clearTime,
        earlyByMs,
        actualWaitMs,
      }),
    ];
    writeFileSync(eventsFilePath, `${events.join('\n')}\n`);

    // FIX: assertions wrapped in try/finally so the fixture file in the real
    // ./logs dir is removed even when an assertion throws.
    try {
      const task = insertTask('check_rate_limits', {}, 'running');

      await assert.doesNotReject(
        agent.processTask(task),
        'checkRateLimitPatterns false_positive should not throw'
      );

      const completed = sharedDb
        .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
        .get(task.id);
      assert.equal(completed?.status, 'completed');
      const result = JSON.parse(completed.result_json || '{}');
      assert.ok(result.findings >= 1, 'should detect false_positive signal');
      assert.ok(
        result.signals.some(s => s.signal === 'false_positive'),
        'signals should include false_positive'
      );
    } finally {
      try {
        unlinkSync(eventsFilePath);
      } catch {
        /* ignore */
      }
    }
  });
});
707  
708  // ── checkRateLimitPatterns — high_frequency signal ───────────────────────────
709  
describe('MonitorAgent - checkRateLimitPatterns high_frequency signal (lines 1853-1876)', () => {
  test('detects high_frequency when same API rate-limited >3 times in 24h', async () => {
    const now = Date.now();
    const eventsFilePath = `${process.cwd()}/logs/rate-limit-events.jsonl`;

    try {
      mkdirSync(`${process.cwd()}/logs`, { recursive: true });
    } catch {
      /* ignore */
    }

    // 4 'set' events for the same API within a single day (3 days ago)
    const base = now - 3 * 24 * 60 * 60 * 1000;
    const events = [0, 2, 4, 6].map(hoursOffset =>
      JSON.stringify({
        api: 'anthropic',
        type: 'set',
        timestamp: new Date(base + hoursOffset * 60 * 60 * 1000).toISOString(),
        reason: 'quota',
      })
    );
    writeFileSync(eventsFilePath, `${events.join('\n')}\n`);

    // FIX: assertions wrapped in try/finally so the fixture file in the real
    // ./logs dir is removed even when an assertion throws.
    try {
      const task = insertTask('check_rate_limits', {}, 'running');

      await assert.doesNotReject(
        agent.processTask(task),
        'checkRateLimitPatterns high_frequency should not throw'
      );

      const completed = sharedDb
        .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
        .get(task.id);
      assert.equal(completed?.status, 'completed');
      const result = JSON.parse(completed.result_json || '{}');
      assert.ok(result.findings >= 1, 'should detect high_frequency signal');
      assert.ok(
        result.signals.some(s => s.signal === 'high_frequency'),
        'signals should include high_frequency'
      );
    } finally {
      try {
        unlinkSync(eventsFilePath);
      } catch {
        /* ignore */
      }
    }
  });
});
758  
// ── checkRateLimitPatterns — active limits surfaced ──────────────────────────

describe('MonitorAgent - checkRateLimitPatterns active limits (lines 1880-1896)', () => {
  test('surfaces currently active rate limits from rate-limits.json', async () => {
    const now = Date.now();
    const rateLimitsFile = `${process.cwd()}/logs/rate-limits.json`;
    const eventsFilePath = `${process.cwd()}/logs/rate-limit-events.jsonl`;

    try {
      mkdirSync(`${process.cwd()}/logs`, { recursive: true });
    } catch {
      /* ignore */
    }

    // Fixture: an empty events file (no findings) plus a rate-limits.json
    // holding one still-active limit and one already-expired limit.
    writeFileSync(eventsFilePath, '');
    writeFileSync(
      rateLimitsFile,
      JSON.stringify({
        zenrows: {
          resetAt: String(now + 2 * 60 * 60 * 1000), // 2h from now
          stages: ['assets'],
          reason: 'quota',
        },
        expired_api: {
          resetAt: String(now - 1000), // already expired
          stages: ['enrich'],
          reason: 'quota',
        },
      })
    );

    const task = insertTask('check_rate_limits', {}, 'running');

    try {
      await assert.doesNotReject(
        agent.processTask(task),
        'checkRateLimitPatterns with active limits should not throw'
      );

      const completed = sharedDb
        .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
        .get(task.id);
      assert.equal(completed?.status, 'completed');
      const result = JSON.parse(completed.result_json || '{}');
      // Only the non-expired limit should be surfaced
      assert.equal(result.active_limits, 1, 'should surface 1 active limit (non-expired)');
    } finally {
      // Always delete both fixtures; a leaked rate-limits.json would make
      // later tests see a phantom active limit.
      for (const file of [eventsFilePath, rateLimitsFile]) {
        try {
          unlinkSync(file);
        } catch {
          /* ignore */
        }
      }
    }
  });
});
818  
// ── checkRateLimitPatterns — JSONL with malformed lines ──────────────────────

describe('MonitorAgent - checkRateLimitPatterns malformed JSONL lines (lines 1790-1797)', () => {
  test('skips malformed lines in rate-limit-events.jsonl without throwing', async () => {
    const now = Date.now();
    const eventsFilePath = `${process.cwd()}/logs/rate-limit-events.jsonl`;

    try {
      mkdirSync(`${process.cwd()}/logs`, { recursive: true });
    } catch {
      /* ignore */
    }

    // Fixture: interleave valid and invalid JSON lines; only the two valid
    // lines should survive the agent's parse step.
    const recentTime = new Date(now - 1 * 24 * 60 * 60 * 1000).toISOString();
    const lines = [
      'this is not valid json {{{',
      JSON.stringify({ api: 'zenrows', type: 'set', timestamp: recentTime, reason: 'quota' }),
      '{"incomplete": true',
      JSON.stringify({ api: 'zenrows', type: 'clear', timestamp: recentTime }),
    ];
    writeFileSync(eventsFilePath, `${lines.join('\n')}\n`);

    const task = insertTask('check_rate_limits', {}, 'running');

    try {
      await assert.doesNotReject(
        agent.processTask(task),
        'checkRateLimitPatterns should not throw with malformed JSONL'
      );

      const completed = sharedDb
        .prepare('SELECT status, result_json FROM agent_tasks WHERE id = ?')
        .get(task.id);
      assert.equal(completed?.status, 'completed');
      const result = JSON.parse(completed.result_json || '{}');
      // Only 2 valid lines should be parsed
      assert.equal(result.events_analysed, 2, 'should parse only valid JSON lines');
    } finally {
      // Clean up regardless of assertion outcome so the malformed fixture
      // cannot bleed into any later rate-limit test.
      try {
        unlinkSync(eventsFilePath);
      } catch {
        /* ignore */
      }
    }
  });
});