/**
 * Tests for src/cron/precompute-dashboard.js — using the pg-mock pattern.
 *
 * Uses createPgMock(db) to give every internal cache function real SQLite
 * execution, which drives function coverage from 0% to near 100%.
 *
 * Each describe block targets one cache function. Because Node.js ESM caches
 * modules, precompute-dashboard.js is imported exactly once, so the tests
 * verify observable side effects (rows in dashboard_cache) rather than
 * calling private functions directly.
 *
 * NOTE: requires --experimental-test-module-mocks
 */

import { test, describe, mock, after } from 'node:test';
import assert from 'node:assert/strict';
import Database from 'better-sqlite3';
import { createPgMock } from '../helpers/pg-mock.js';
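
// Assumed contract for the pg-mock helper (illustrative only; see
// ../helpers/pg-mock.js for the real behaviour): it provides the same named
// exports as src/utils/db.js, with a query(text, params) that rewrites
// Postgres placeholders and schema prefixes before running against SQLite, e.g.
//
//   query('SELECT * FROM ops.cron_jobs WHERE name = $1', ['Test Job'])
//   // → db.prepare('SELECT * FROM cron_jobs WHERE name = ?').all('Test Job')
//
// which is why the tables below are created without their ops./tel. prefixes.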

// ── Set up in-memory SQLite with full schema ──────────────────────────────────

const db = new Database(':memory:');

// Register SQLite user-defined functions that emulate the PostgreSQL system functions
// needed by precompute-dashboard.js; they return constants so the queries succeed.
db.function('current_database', () => 'test_db');
// pg_database_size receives the db name as its argument (passed from current_database())
db.function('pg_database_size', (_dbName) => 5 * 1024 * 1024); // fake 5 MB

// date_trunc(precision, timestamp) → timestamp string truncated to the given precision
db.function('date_trunc', (precision, ts) => {
  if (!ts) return null;
  // SQLite's datetime() yields UTC 'YYYY-MM-DD HH:MM:SS'; normalise to an ISO
  // string so Date parses it as UTC rather than local time before truncating.
  const d = new Date(String(ts).includes('T') ? ts : `${String(ts).replace(' ', 'T')}Z`);
  switch (precision) {
    case 'hour':
      return `${d.getUTCFullYear()}-${String(d.getUTCMonth()+1).padStart(2,'0')}-${String(d.getUTCDate()).padStart(2,'0')} ${String(d.getUTCHours()).padStart(2,'0')}:00:00`;
    case 'day':
      return `${d.getUTCFullYear()}-${String(d.getUTCMonth()+1).padStart(2,'0')}-${String(d.getUTCDate()).padStart(2,'0')} 00:00:00`;
    default:
      return ts;
  }
});

// to_char(value, format) → simplified string conversion for the formats used here
db.function('to_char', (value, _format) => {
  // For our purposes, value is already a truncated timestamp string — just return it
  if (!value) return null;
  return String(value).slice(0, 19); // 'YYYY-MM-DD HH:MM:SS'
});
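
// Quick sanity check (added for illustration) that the UDF shims compose the way the
// dashboard queries expect: date_trunc('hour', ...) followed by to_char(...) should
// yield a 'YYYY-MM-DD HH:00:00' string. The format string itself is ignored by the shim.
{
  const sample = db
    .prepare("SELECT to_char(date_trunc('hour', datetime('now')), 'YYYY-MM-DD HH24:00') AS h")
    .get();
  assert.match(sample.h, /^\d{4}-\d{2}-\d{2} \d{2}:00:00$/, 'UDF shims should produce hour-truncated timestamps');
}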

db.exec(`
  -- Core tables
  CREATE TABLE IF NOT EXISTS sites (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    domain TEXT NOT NULL DEFAULT 'test.com',
    status TEXT DEFAULT 'found',
    score REAL,
    error_message TEXT,
    http_status_code INTEGER,
    resulted_in_sale INTEGER DEFAULT 0,
    sale_amount REAL DEFAULT 0.0,
    created_at TEXT DEFAULT (datetime('now')),
    updated_at TEXT DEFAULT (datetime('now'))
  );

  CREATE TABLE IF NOT EXISTS messages (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    site_id INTEGER,
    direction TEXT NOT NULL DEFAULT 'outbound',
    contact_method TEXT DEFAULT 'email',
    approval_status TEXT DEFAULT 'approved',
    delivery_status TEXT DEFAULT 'sent',
    read_at TEXT,
    created_at TEXT DEFAULT (datetime('now')),
    updated_at TEXT DEFAULT (datetime('now'))
  );

  CREATE TABLE IF NOT EXISTS keywords (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    keyword TEXT NOT NULL,
    status TEXT DEFAULT 'pending',
    created_at TEXT DEFAULT (datetime('now'))
  );

  CREATE TABLE IF NOT EXISTS site_status (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    site_id INTEGER NOT NULL,
    status TEXT,
    created_at TEXT DEFAULT (datetime('now'))
  );

  CREATE TABLE IF NOT EXISTS human_review_queue (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    file TEXT,
    reason TEXT,
    type TEXT,
    priority TEXT DEFAULT 'medium',
    status TEXT DEFAULT 'pending',
    created_at TEXT DEFAULT (datetime('now'))
  );

  CREATE TABLE IF NOT EXISTS dashboard_cache (
    cache_key TEXT PRIMARY KEY,
    cache_value TEXT NOT NULL,
    expires_at TEXT NOT NULL,
    updated_at TEXT NOT NULL DEFAULT (datetime('now'))
  );

  -- ops schema tables (stripped of prefix by pg-mock)
  CREATE TABLE IF NOT EXISTS cron_jobs (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    name TEXT NOT NULL UNIQUE,
    task_key TEXT NOT NULL UNIQUE,
    interval_value INTEGER NOT NULL DEFAULT 5,
    interval_unit TEXT NOT NULL DEFAULT 'minutes',
    enabled INTEGER DEFAULT 1,
    last_run_at TEXT
  );

  CREATE TABLE IF NOT EXISTS cron_job_logs (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    job_name TEXT NOT NULL,
    started_at TEXT NOT NULL DEFAULT (datetime('now')),
    finished_at TEXT,
    status TEXT NOT NULL DEFAULT 'running',
    summary TEXT,
    full_log TEXT,
    error_message TEXT,
    items_processed INTEGER DEFAULT 0,
    items_failed INTEGER DEFAULT 0
  );

  -- tel schema tables (stripped of prefix by pg-mock)
  CREATE TABLE IF NOT EXISTS llm_usage (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    site_id INTEGER,
    stage TEXT NOT NULL,
    provider TEXT NOT NULL DEFAULT 'openrouter',
    model TEXT NOT NULL DEFAULT 'test-model',
    prompt_tokens INTEGER NOT NULL DEFAULT 100,
    completion_tokens INTEGER NOT NULL DEFAULT 50,
    total_tokens INTEGER NOT NULL DEFAULT 150,
    estimated_cost REAL DEFAULT 0.001,
    created_at TEXT DEFAULT (datetime('now'))
  );

  CREATE TABLE IF NOT EXISTS system_metrics (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    cpu_percent REAL DEFAULT 5.0,
    disk_read_mb REAL DEFAULT 1.0,
    disk_write_mb REAL DEFAULT 0.5,
    memory_percent REAL DEFAULT 30.0,
    recorded_at TEXT DEFAULT (datetime('now'))
  );

  -- pg_tables mock for purchases check
  CREATE TABLE IF NOT EXISTS pg_tables (
    schemaname TEXT,
    tablename TEXT
  );

  -- Purchases table for summary test
  CREATE TABLE IF NOT EXISTS purchases (
    id INTEGER PRIMARY KEY AUTOINCREMENT,
    status TEXT NOT NULL DEFAULT 'completed',
    amount_usd REAL DEFAULT 297.0,
    country_code TEXT DEFAULT 'AU',
    delivered_at TEXT,
    created_at TEXT DEFAULT (datetime('now'))
  );
`);
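
// Note: pg_tables starts empty. The purchases_summary code path checks pg_tables for a
// 'purchases' row before querying the table, so the tests below toggle that branch by
// inserting or deleting the ('m333', 'purchases') catalog row.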

// ── Mock db.js BEFORE importing precompute-dashboard ─────────────────────────

mock.module('../../src/utils/db.js', { namedExports: createPgMock(db) });

mock.module('../../src/utils/logger.js', {
  defaultExport: class {
    info() {}
    warn() {}
    error() {}
    debug() {}
    success() {}
  },
});

// error-categories uses no DB — mock it to avoid side-effect imports
mock.module('../../src/utils/error-categories.js', {
  namedExports: {
    buildStatusTree: () => [{ label: 'test', count: 0 }],
    buildOutreachTree: () => [{ label: 'test', count: 0 }],
  },
});

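// mock.module() only affects modules that have not been loaded yet, so the three mocks
// above must be registered before this dynamic import resolves db.js, logger.js and
// error-categories.js via precompute-dashboard's own import graph.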
const { precomputeDashboard } = await import('../../src/cron/precompute-dashboard.js');

// ── Helper ────────────────────────────────────────────────────────────────────

function getCacheRow(key) {
  return db.prepare('SELECT * FROM dashboard_cache WHERE cache_key = ?').get(key);
}

function clearCache() {
  db.prepare('DELETE FROM dashboard_cache').run();
}
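
// Close the shared in-memory SQLite handle once the whole suite has finished.
// (Registered at the top level so it runs after every describe block below.)
after(() => db.close());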

// ── Tests ─────────────────────────────────────────────────────────────────────

describe('precomputeDashboard — pipeline_funnel', () => {
  test('caches pipeline_funnel key with empty sites table', async () => {
    clearCache();
    await precomputeDashboard();
    const row = getCacheRow('pipeline_funnel');
    assert.ok(row, 'pipeline_funnel key should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value), 'pipeline_funnel should be array');
  });

  test('caches pipeline_funnel with actual site data', async () => {
    clearCache();
    db.prepare("INSERT INTO sites (status) VALUES ('found')").run();
    db.prepare("INSERT INTO sites (status) VALUES ('assets_captured')").run();
    db.prepare("INSERT INTO sites (status) VALUES ('ignored')").run(); // excluded
    await precomputeDashboard();
    const row = getCacheRow('pipeline_funnel');
    const value = JSON.parse(row.cache_value);
    // Should have found + assets_captured (2 statuses, not ignored)
    assert.ok(value.some(r => r.status === 'found'), 'should include found status');
    assert.ok(!value.some(r => r.status === 'ignored'), 'should not include ignored status');
    db.prepare("DELETE FROM sites").run();
  });
});

describe('precomputeDashboard — response_rates', () => {
  test('caches response_rates with outbound messages', async () => {
    clearCache();
    db.prepare(
      "INSERT INTO messages (direction, contact_method, delivery_status) VALUES ('outbound', 'email', 'sent')"
    ).run();
    db.prepare(
      "INSERT INTO messages (direction, contact_method, delivery_status) VALUES ('outbound', 'sms', 'delivered')"
    ).run();
    await precomputeDashboard();
    const row = getCacheRow('response_rates');
    assert.ok(row, 'response_rates key should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
    db.prepare("DELETE FROM messages").run();
  });

  test('caches response_rates with empty messages table', async () => {
    clearCache();
    await precomputeDashboard();
    const row = getCacheRow('response_rates');
    assert.ok(row, 'response_rates should exist even when empty');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
    assert.equal(value.length, 0, 'should be empty array when no messages');
  });
});

describe('precomputeDashboard — outreach_funnel', () => {
  test('caches outreach_funnel with mixed delivery statuses', async () => {
    clearCache();
    for (const status of ['sent', 'delivered', 'failed', 'bounced']) {
      db.prepare(
        "INSERT INTO messages (direction, contact_method, delivery_status, approval_status) VALUES ('outbound', 'email', ?, 'approved')"
      ).run(status);
    }
    await precomputeDashboard();
    const row = getCacheRow('outreach_funnel');
    assert.ok(row, 'outreach_funnel key should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
    db.prepare("DELETE FROM messages").run();
  });
});

describe('precomputeDashboard — llm_usage_by_stage', () => {
  test('caches llm_usage_by_stage with data', async () => {
    clearCache();
    db.prepare(
      "INSERT INTO llm_usage (stage, prompt_tokens, completion_tokens, total_tokens, estimated_cost) VALUES ('scoring', 100, 50, 150, 0.002)"
    ).run();
    db.prepare(
      "INSERT INTO llm_usage (stage, prompt_tokens, completion_tokens, total_tokens, estimated_cost) VALUES ('proposals', 200, 100, 300, 0.005)"
    ).run();
    await precomputeDashboard();
    const row = getCacheRow('llm_usage_by_stage');
    assert.ok(row, 'llm_usage_by_stage key should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
    assert.ok(value.some(r => r.stage === 'scoring' || r.stage === 'proposals'));
    db.prepare("DELETE FROM llm_usage").run();
  });
});

describe('precomputeDashboard — daily_throughput_30d', () => {
  test('caches daily_throughput_30d', async () => {
    clearCache();
    await precomputeDashboard();
    const row = getCacheRow('daily_throughput_30d');
    assert.ok(row, 'daily_throughput_30d should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
  });
});

describe('precomputeDashboard — conversation_stats', () => {
  test('caches conversation_stats with inbound messages', async () => {
    clearCache();
    db.prepare(
      "INSERT INTO messages (direction, read_at) VALUES ('inbound', NULL)"
    ).run();
    db.prepare(
      "INSERT INTO messages (direction, read_at) VALUES ('inbound', datetime('now'))"
    ).run();
    await precomputeDashboard();
    const row = getCacheRow('conversation_stats');
    assert.ok(row, 'conversation_stats should exist');
    const value = JSON.parse(row.cache_value);
    // Should have inbound count and unread count
    assert.ok(value !== null);
    db.prepare("DELETE FROM messages").run();
  });
});

describe('precomputeDashboard — error_breakdown', () => {
  test('caches error_breakdown with error sites', async () => {
    clearCache();
    db.prepare(
      "INSERT INTO sites (status, error_message) VALUES ('assets_captured', 'Connection timeout')"
    ).run();
    db.prepare(
      "INSERT INTO sites (status, error_message) VALUES ('assets_captured', 'Connection timeout')"
    ).run();
    db.prepare(
      "INSERT INTO sites (status, error_message) VALUES ('ignored', 'Some error')"
    ).run(); // ignored — excluded
    await precomputeDashboard();
    const row = getCacheRow('error_breakdown');
    assert.ok(row, 'error_breakdown should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
    assert.ok(value.some(r => r.error_message === 'Connection timeout'));
    db.prepare("DELETE FROM sites").run();
  });
});

describe('precomputeDashboard — excluded_sites_count', () => {
  test('caches excluded_sites_count with ignored and failing sites', async () => {
    clearCache();
    db.prepare("INSERT INTO sites (status) VALUES ('ignored')").run();
    db.prepare("INSERT INTO sites (status) VALUES ('ignored')").run();
    db.prepare("INSERT INTO sites (status) VALUES ('failing')").run();
    db.prepare("INSERT INTO sites (status) VALUES ('found')").run();
    await precomputeDashboard();
    const row = getCacheRow('excluded_sites_count');
    assert.ok(row, 'excluded_sites_count should exist');
    const value = JSON.parse(row.cache_value);
    assert.equal(Number(value.ignored), 2, 'should count 2 ignored sites');
    assert.equal(Number(value.failing), 1, 'should count 1 failing site');
    db.prepare("DELETE FROM sites").run();
  });

  test('excluded_sites_count defaults to 0 when no excluded sites', async () => {
    clearCache();
    await precomputeDashboard();
    const row = getCacheRow('excluded_sites_count');
    const value = JSON.parse(row.cache_value);
    assert.equal(value.ignored, 0);
    assert.equal(value.failing, 0);
  });
});

describe('precomputeDashboard — total_active_errors', () => {
  test('caches total_active_errors count', async () => {
    clearCache();
    db.prepare(
      "INSERT INTO sites (status, error_message) VALUES ('found', 'Some error')"
    ).run();
    db.prepare(
      "INSERT INTO sites (status, error_message) VALUES ('ignored', 'Error but ignored')"
    ).run();
    await precomputeDashboard();
    const row = getCacheRow('total_active_errors');
    assert.ok(row, 'total_active_errors should exist');
    const value = JSON.parse(row.cache_value);
    // value is the raw count (may be a number or a numeric string)
    assert.ok(Number(value) >= 0, 'should be a non-negative number');
    db.prepare("DELETE FROM sites").run();
  });
});

describe('precomputeDashboard — cron_job_summary', () => {
  test('caches cron_job_summary with cron logs', async () => {
    clearCache();
    db.prepare(
      "INSERT INTO cron_jobs (name, task_key, interval_value, interval_unit) VALUES ('Test Job', 'testJob', 5, 'minutes')"
    ).run();
    db.prepare(
      "INSERT INTO cron_job_logs (job_name, started_at, finished_at, status) VALUES ('Test Job', datetime('now', '-1 hour'), datetime('now', '-58 minutes'), 'success')"
    ).run();
    db.prepare(
      "INSERT INTO cron_job_logs (job_name, started_at, finished_at, status) VALUES ('Test Job', datetime('now', '-2 hours'), datetime('now', '-1 hour', '-58 minutes'), 'failed')"
    ).run();
    await precomputeDashboard();
    const row = getCacheRow('cron_job_summary');
    assert.ok(row, 'cron_job_summary should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
    db.prepare("DELETE FROM cron_job_logs").run();
    db.prepare("DELETE FROM cron_jobs").run();
  });
});

describe('precomputeDashboard — database_health', () => {
  test('caches database_health with table counts', async () => {
    clearCache();
    // Insert some test data
    db.prepare("INSERT INTO sites (status) VALUES ('found')").run();
    db.prepare(
      "INSERT INTO messages (direction, delivery_status) VALUES ('outbound', 'sent')"
    ).run();
    db.prepare(
      "INSERT INTO messages (direction, delivery_status) VALUES ('inbound', 'received')"
    ).run();
    db.prepare("INSERT INTO keywords (keyword) VALUES ('test keyword')").run();
    await precomputeDashboard();
    const row = getCacheRow('database_health');
    assert.ok(row, 'database_health should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok('table_counts' in value, 'should have table_counts');
    assert.ok(Array.isArray(value.table_counts), 'table_counts should be array');
    assert.equal(value.integrity, 'ok', 'integrity should be ok');
    db.prepare("DELETE FROM sites").run();
    db.prepare("DELETE FROM messages").run();
    db.prepare("DELETE FROM keywords").run();
  });
});

describe('precomputeDashboard — chart data caching', () => {
  test('caches chart_hourly_status_breakdown_48h', async () => {
    clearCache();
    db.prepare(
      "INSERT INTO site_status (site_id, status) VALUES (1, 'found')"
    ).run();
    await precomputeDashboard();
    const row = getCacheRow('chart_hourly_status_breakdown_48h');
    assert.ok(row, 'chart_hourly_status_breakdown_48h should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
    db.prepare("DELETE FROM site_status").run();
  });

  test('caches chart_hourly_throughput_48h', async () => {
    clearCache();
    await precomputeDashboard();
    const row = getCacheRow('chart_hourly_throughput_48h');
    assert.ok(row, 'chart_hourly_throughput_48h should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
  });

  test('caches chart_cron_timeline_24h and sorts chronologically', async () => {
    clearCache();
    db.prepare(
      "INSERT INTO cron_job_logs (job_name, started_at, finished_at, status) VALUES ('Job A', datetime('now', '-3 hours'), datetime('now', '-2 hours', '-50 minutes'), 'success')"
    ).run();
    db.prepare(
      "INSERT INTO cron_job_logs (job_name, started_at, finished_at, status) VALUES ('Job B', datetime('now', '-1 hour'), datetime('now', '-50 minutes'), 'success')"
    ).run();
    await precomputeDashboard();
    const row = getCacheRow('chart_cron_timeline_24h');
    assert.ok(row, 'chart_cron_timeline_24h should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
    // Should be sorted chronologically (ascending started_at)
    if (value.length >= 2) {
      assert.ok(
        new Date(value[0].started_at) <= new Date(value[value.length - 1].started_at),
        'should be sorted chronologically'
      );
    }
    db.prepare("DELETE FROM cron_job_logs").run();
  });

  test('caches chart_system_metrics_24h', async () => {
    clearCache();
    db.prepare(
      "INSERT INTO system_metrics (cpu_percent, memory_percent) VALUES (15.5, 42.0)"
    ).run();
    await precomputeDashboard();
    const row = getCacheRow('chart_system_metrics_24h');
    assert.ok(row, 'chart_system_metrics_24h should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
    db.prepare("DELETE FROM system_metrics").run();
  });

  test('caches chart_llm_daily_costs_30d', async () => {
    clearCache();
    db.prepare(
      "INSERT INTO llm_usage (stage, prompt_tokens, completion_tokens, total_tokens, estimated_cost) VALUES ('scoring', 100, 50, 150, 0.002)"
    ).run();
    await precomputeDashboard();
    const row = getCacheRow('chart_llm_daily_costs_30d');
    assert.ok(row, 'chart_llm_daily_costs_30d should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
    db.prepare("DELETE FROM llm_usage").run();
  });

  test('caches chart_llm_cost_by_stage_30d', async () => {
    clearCache();
    await precomputeDashboard();
    const row = getCacheRow('chart_llm_cost_by_stage_30d');
    assert.ok(row, 'chart_llm_cost_by_stage_30d should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
  });

  test('caches chart_http_errors_30d', async () => {
    clearCache();
    db.prepare(
      "INSERT INTO sites (status, http_status_code) VALUES ('assets_captured', 404)"
    ).run();
    db.prepare(
      "INSERT INTO sites (status, http_status_code) VALUES ('assets_captured', 500)"
    ).run();
    await precomputeDashboard();
    const row = getCacheRow('chart_http_errors_30d');
    assert.ok(row, 'chart_http_errors_30d should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
    db.prepare("DELETE FROM sites").run();
  });

  test('caches chart_cron_daily_history_7d', async () => {
    clearCache();
    await precomputeDashboard();
    const row = getCacheRow('chart_cron_daily_history_7d');
    assert.ok(row, 'chart_cron_daily_history_7d should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value));
  });
});

describe('precomputeDashboard — purchases_summary', () => {
  test('returns empty purchases_summary when purchases table does not exist in pg_tables', async () => {
    clearCache();
    // pg_tables is empty — no 'purchases' row — so the table check returns null
    db.prepare("DELETE FROM pg_tables").run();
    await precomputeDashboard();
    const row = getCacheRow('purchases_summary');
    assert.ok(row, 'purchases_summary should exist');
    const value = JSON.parse(row.cache_value);
    assert.deepEqual(value.by_status, []);
    assert.equal(value.total_revenue_usd, 0);
    assert.equal(value.failed_count, 0);
  });

  test('returns full purchases_summary when purchases table exists', async () => {
    clearCache();
    // Register purchases table in pg_tables mock
    db.prepare(
      "INSERT OR IGNORE INTO pg_tables (schemaname, tablename) VALUES ('m333', 'purchases')"
    ).run();
    // Seed purchase data
    db.prepare(
      "INSERT INTO purchases (status, amount_usd, country_code) VALUES ('completed', 297.0, 'AU')"
    ).run();
    db.prepare(
      "INSERT INTO purchases (status, amount_usd, country_code) VALUES ('completed', 159.0, 'GB')"
    ).run();
    db.prepare(
      "INSERT INTO purchases (status, amount_usd, country_code) VALUES ('failed', 0, 'US')"
    ).run();
    await precomputeDashboard();
    const row = getCacheRow('purchases_summary');
    assert.ok(row, 'purchases_summary should exist');
    const value = JSON.parse(row.cache_value);
    assert.ok(Array.isArray(value.by_status));
    assert.ok(Array.isArray(value.revenue_by_country));
    assert.ok(Number(value.total_revenue_usd) > 0, 'should have revenue');
    assert.ok(Number(value.failed_count) >= 0);
    db.prepare("DELETE FROM purchases").run();
    db.prepare("DELETE FROM pg_tables").run();
  });
});

describe('precomputeDashboard — cost_forecast', () => {
  test('computes cost_forecast when no existing cache entry', async () => {
    clearCache();
    // Seed some LLM usage and pipeline data
    db.prepare(
      "INSERT INTO llm_usage (stage, estimated_cost, total_tokens) VALUES ('scoring', 0.01, 500)"
    ).run();
    db.prepare(
      "INSERT INTO sites (status) VALUES ('found')"
    ).run();
    await precomputeDashboard();
    const row = getCacheRow('cost_forecast');
    assert.ok(row, 'cost_forecast should be written');
    const value = JSON.parse(row.cache_value);
    assert.ok('daily_api_cost_avg' in value);
    assert.ok('pipeline_cost_forecast' in value);
    assert.ok('avg_deal_value' in value);
    db.prepare("DELETE FROM llm_usage").run();
    db.prepare("DELETE FROM sites").run();
  });

  test('skips cost_forecast when a fresh entry exists', async () => {
    clearCache();
    // Insert a manually-crafted fresh cache entry with far-future expires_at
    db.prepare(
      "INSERT INTO dashboard_cache (cache_key, cache_value, expires_at, updated_at) VALUES ('cost_forecast', '{\"daily_api_cost_avg\":0.05}', datetime('now', '+10 days'), datetime('now'))"
    ).run();
    await precomputeDashboard();
    const row = getCacheRow('cost_forecast');
    // The value should NOT be overwritten — still the original manual entry
    const value = JSON.parse(row.cache_value);
    assert.equal(value.daily_api_cost_avg, 0.05, 'fresh cache should not be overwritten');
  });
});

describe('precomputeDashboard — cacheStatusTrees', () => {
  test('caches status_tree and outreach_tree via buildStatusTree/buildOutreachTree', async () => {
    clearCache();
    await precomputeDashboard();
    const siteTree = getCacheRow('status_tree');
    const outreachTree = getCacheRow('outreach_tree');
    assert.ok(siteTree, 'status_tree should be cached');
    assert.ok(outreachTree, 'outreach_tree should be cached');
  });
});

describe('precomputeDashboard — return value structure', () => {
  test('returns summary, details, and metrics', async () => {
    clearCache();
    const result = await precomputeDashboard();
    assert.ok(result && typeof result === 'object', 'should return an object');
    assert.ok(typeof result.summary === 'string', 'should have summary string');
    assert.ok(typeof result.details === 'object', 'should have details object');
    assert.ok(typeof result.metrics === 'object', 'should have metrics object');
    assert.ok(typeof result.metrics.entries_cached === 'number', 'entries_cached should be number');
    assert.ok(typeof result.metrics.duration_ms === 'number', 'duration_ms should be number');
    assert.equal(result.metrics.cache_ttl_minutes, 15, 'cache TTL should be 15 minutes');
  });

  test('summary includes cache entry count', async () => {
    clearCache();
    const result = await precomputeDashboard();
    assert.ok(result.summary.includes('Pre-computed'), 'summary should mention Pre-computed');
    assert.ok(result.summary.includes('cache entries'), 'summary should mention cache entries');
  });

  test('details.cache_expiration_minutes is 15', async () => {
    clearCache();
    const result = await precomputeDashboard();
    assert.equal(result.details.cache_expiration_minutes, 15);
  });

  test('details.skipped is an array', async () => {
    clearCache();
    const result = await precomputeDashboard();
    assert.ok(Array.isArray(result.details.skipped), 'skipped should be an array');
  });
});

describe('precomputeDashboard — cache upsert behaviour', () => {
  test('running twice updates existing cache entries (upsert)', async () => {
    clearCache();
    const r1 = await precomputeDashboard();
    const r2 = await precomputeDashboard();
    // Both runs should succeed and produce cache entries
    assert.ok(r1.metrics.entries_cached > 0, 'first run should write entries');
    assert.ok(r2.metrics.entries_cached > 0, 'second run should update entries');
    // Cache should not have duplicates (upsert)
    const rows = db.prepare('SELECT cache_key FROM dashboard_cache').all();
    const keys = rows.map(r => r.cache_key);
    const uniqueKeys = new Set(keys);
    assert.equal(keys.length, uniqueKeys.size, 'should have no duplicate keys');
  });

  test('all cache entries have valid JSON values', async () => {
    clearCache();
    await precomputeDashboard();
    const rows = db.prepare('SELECT cache_key, cache_value FROM dashboard_cache').all();
    assert.ok(rows.length > 0, 'should have cached some entries');
    for (const row of rows) {
      assert.doesNotThrow(
        () => JSON.parse(row.cache_value),
        `cache_value for "${row.cache_key}" should be valid JSON`
      );
    }
  });

  test('all cache entries have future expires_at', async () => {
    clearCache();
    await precomputeDashboard();
    const rows = db.prepare('SELECT cache_key, expires_at FROM dashboard_cache').all();
    const now = new Date();
    for (const row of rows) {
      if (row.cache_key === 'cost_forecast') continue; // uses its own longer 4-day TTL; freshness behaviour covered above
      const exp = new Date(row.expires_at);
      assert.ok(exp > now, `expires_at for "${row.cache_key}" should be in the future`);
    }
  });
713  });