// Files
// Neon-Desk/lib/server/api/task-workflow-hybrid.e2e.test.ts
//
// 1367 lines
// 43 KiB
// TypeScript

import {
afterAll,
beforeAll,
beforeEach,
describe,
expect,
it,
mock
} from 'bun:test';
import { mkdtempSync, readFileSync, rmSync } from 'node:fs';
import { tmpdir } from 'node:os';
import { join } from 'node:path';
import { Database } from 'bun:sqlite';
import type { WorkflowRunStatus } from '@workflow/world';
// Fixed identity used by the mocked auth session and by ensureTestUser below.
const TEST_USER_ID = 'e2e-user';
const TEST_USER_EMAIL = 'e2e@example.com';
const TEST_USER_NAME = 'E2E User';
// In-memory stand-in for workflow run state: run id -> current status.
const runStatuses = new Map<string, WorkflowRunStatus>();
// Monotonic counter minting deterministic run ids ("run-1", "run-2", ...).
let runCounter = 0;
// Toggled by tests to simulate an outage in the mocked runs.list call.
let workflowBackendHealthy = true;
// Per-suite resources created in beforeAll and torn down in afterAll.
let tempDir: string | null = null;
let sqliteClient: Database | null = null;
// HTTP app under test (imported in beforeAll); requests go through app.handle.
let app: { handle: (request: Request) => Promise<Response> } | null = null;
// Replace the workflow API with deterministic in-memory fakes: `start` mints
// sequential run ids and `getRun` reports whatever status the test recorded.
mock.module('workflow/api', () => ({
  start: mock(async () => {
    runCounter += 1;
    const nextRunId = `run-${runCounter}`;
    runStatuses.set(nextRunId, 'pending');
    return { runId: nextRunId };
  }),
  getRun: mock((runId: string) => ({
    get status() {
      const current = runStatuses.get(runId);
      return Promise.resolve(current ?? 'pending');
    }
  }))
}));
// Fake workflow runtime: runs.list returns an empty page when the backend is
// "healthy" and throws when the workflowBackendHealthy toggle is off.
mock.module('workflow/runtime', () => ({
  getWorld: () => ({
    runs: {
      list: async () => {
        if (workflowBackendHealthy) {
          return { data: [] };
        }
        throw new Error('Workflow backend unavailable');
      }
    }
  })
}));
// Bypass real authentication: every request resolves to the fixed test user.
// A fresh user object is built per call, matching the original behavior.
mock.module('@/lib/server/auth-session', () => ({
  requireAuthenticatedSession: async () => {
    const sessionUser = {
      id: TEST_USER_ID,
      email: TEST_USER_EMAIL,
      name: TEST_USER_NAME,
      image: null
    };
    return {
      session: { user: sessionUser },
      response: null
    };
  }
}));
/**
 * Drops the process-wide DB singletons so the app module re-creates them
 * against the DATABASE_URL configured by this suite. Any open sqlite client
 * is closed first; both the client and its close method are optional-chained
 * because the singletons may not exist yet.
 */
function resetDbSingletons() {
  const state = globalThis as typeof globalThis & {
    __fiscalSqliteClient?: { close?: () => void };
    __fiscalDrizzleDb?: unknown;
    __financialIngestionSchemaStatus?: unknown;
  };
  state.__fiscalSqliteClient?.close?.();
  state.__fiscalSqliteClient = undefined;
  state.__fiscalDrizzleDb = undefined;
  state.__financialIngestionSchemaStatus = undefined;
}
/**
 * Injects a synthetic financial-ingestion schema status into the global
 * singleton, letting tests simulate healthy or drifted schema states without
 * running the real schema checker. When ok is false a fixed error message is
 * attached so responses can be asserted against it.
 */
function setFinancialIngestionSchemaStatus(input: {
  ok: boolean;
  mode: 'healthy' | 'repaired' | 'drifted' | 'failed';
  missingIndexes?: string[];
  duplicateGroups?: number;
}) {
  type SchemaStatus = {
    ok: boolean;
    mode: 'healthy' | 'repaired' | 'drifted' | 'failed';
    requestedMode: 'auto' | 'check-only' | 'off';
    missingIndexes: string[];
    duplicateGroups: number;
    lastCheckedAt: string;
    repair: null;
    error: string | null;
  };
  const state = globalThis as typeof globalThis & {
    __financialIngestionSchemaStatus?: SchemaStatus;
  };
  const status: SchemaStatus = {
    ok: input.ok,
    mode: input.mode,
    requestedMode: 'auto',
    missingIndexes: input.missingIndexes ?? [],
    duplicateGroups: input.duplicateGroups ?? 0,
    lastCheckedAt: new Date().toISOString(),
    repair: null,
    error: input.ok ? null : 'schema drift injected by test'
  };
  state.__financialIngestionSchemaStatus = status;
}
/**
 * Replays the project's drizzle SQL migrations, in order, against the given
 * client. Files are read from <cwd>/drizzle, so the suite must be launched
 * from the repository root.
 */
function applySqlMigrations(client: { exec: (query: string) => void }) {
  const orderedMigrations: readonly string[] = [
    '0000_cold_silver_centurion.sql',
    '0001_glossy_statement_snapshots.sql',
    '0002_workflow_task_projection_metadata.sql',
    '0003_task_stage_event_timeline.sql',
    '0004_watchlist_company_taxonomy.sql',
    '0005_financial_taxonomy_v3.sql',
    '0006_coverage_journal_tracking.sql',
    '0007_company_financial_bundles.sql',
    '0008_research_workspace.sql',
    '0009_task_notification_context.sql',
    '0010_taxonomy_surface_sidecar.sql',
    '0011_remove_legacy_xbrl_defaults.sql',
    '0012_company_overview_cache.sql'
  ];
  const migrationsDir = join(process.cwd(), 'drizzle');
  for (const fileName of orderedMigrations) {
    client.exec(readFileSync(join(migrationsDir, fileName), 'utf8'));
  }
}
// Upserts the fixed test user row so rows seeded later (tasks, journal
// entries, caches) can reference a valid user. Values are interpolated rather
// than parameterized: all inputs are module-level constants, so there is no
// injection surface here.
function ensureTestUser(client: { exec: (query: string) => void }) {
  // createdAt/updatedAt are stored as epoch milliseconds.
  const now = Date.now();
  client.exec(`
INSERT OR REPLACE INTO user (
id, name, email, emailVerified, image, createdAt, updatedAt, role, banned, banReason, banExpires
) VALUES (
'${TEST_USER_ID}',
'${TEST_USER_NAME}',
'${TEST_USER_EMAIL}',
1,
NULL,
${now},
${now},
NULL,
0,
NULL,
NULL
);
`);
}
/**
 * Empties every projection/cache table these tests write to, so each test
 * starts from a clean slate. The tables are cleared in a fixed order (same
 * order as the original hand-written statements).
 */
function clearProjectionTables(client: { exec: (query: string) => void }) {
  const tablesInDeleteOrder = [
    'task_stage_event',
    'task_run',
    'research_journal_entry',
    'holding',
    'watchlist_item',
    'portfolio_insight',
    'company_overview_cache',
    'filing',
    'issuer_overlay',
    'issuer_overlay_revision'
  ] as const;
  for (const table of tablesInDeleteOrder) {
    client.exec(`DELETE FROM ${table};`);
  }
}
/**
 * Inserts one filing row directly into the filing table, bypassing the API.
 * metrics/analysis are serialized to JSON text when present; cik falls back
 * to the all-zero CIK; filing/submission URLs are derived from the accession
 * number.
 */
function seedFilingRecord(client: Database, input: {
  ticker: string;
  accessionNumber: string;
  filingType: '10-K' | '10-Q' | '8-K';
  filingDate: string;
  companyName: string;
  cik?: string;
  metrics?: {
    revenue: number | null;
    netIncome: number | null;
    totalAssets: number | null;
    cash: number | null;
    debt: number | null;
  } | null;
  analysisText?: string | null;
}) {
  const timestamp = new Date().toISOString();
  const serializedMetrics = input.metrics ? JSON.stringify(input.metrics) : null;
  let serializedAnalysis: string | null = null;
  if (input.analysisText) {
    serializedAnalysis = JSON.stringify({
      provider: 'test',
      model: 'fixture',
      text: input.analysisText
    });
  }
  const statement = client.query(`
INSERT INTO filing (
ticker,
filing_type,
filing_date,
accession_number,
cik,
company_name,
filing_url,
submission_url,
primary_document,
metrics,
analysis,
created_at,
updated_at
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);
`);
  statement.run(
    input.ticker,
    input.filingType,
    input.filingDate,
    input.accessionNumber,
    input.cik ?? '0000000000',
    input.companyName,
    `https://www.sec.gov/Archives/${input.accessionNumber}.htm`,
    `https://www.sec.gov/submissions/${input.accessionNumber}.json`,
    `${input.accessionNumber}.htm`,
    serializedMetrics,
    serializedAnalysis,
    timestamp,
    timestamp
  );
}
/**
 * Dispatches a request against the in-process app and returns both the raw
 * Response and its parsed JSON body. A JSON content-type header and body are
 * attached only when a payload is supplied.
 *
 * @throws Error if the suite's beforeAll has not initialized the app yet.
 */
async function jsonRequest(
  method: 'GET' | 'POST' | 'PATCH' | 'DELETE',
  path: string,
  body?: Record<string, unknown>
) {
  if (!app) {
    throw new Error('app not initialized');
  }
  const init: RequestInit = { method };
  if (body) {
    init.headers = { 'content-type': 'application/json' };
    init.body = JSON.stringify(body);
  }
  const response = await app.handle(new Request(`http://localhost${path}`, init));
  const json = await response.json();
  return { response, json };
}
/**
 * Builds a minimal company-analysis payload for seeding the
 * company_overview_cache table. Every optional section is empty/unavailable;
 * bullBear is marked 'memo_fallback' only when bull points are supplied.
 */
function buildCachedAnalysisPayload(input: {
  ticker: string;
  companyName: string;
  bull?: string[];
}) {
  const bullPoints = input.bull ?? [];
  const companySection = {
    ticker: input.ticker,
    companyName: input.companyName,
    sector: null,
    category: null,
    tags: [],
    cik: null
  };
  const emptyKeyMetrics = {
    referenceDate: null,
    revenue: null,
    netIncome: null,
    totalAssets: null,
    cash: null,
    debt: null,
    netMargin: null
  };
  const emptyProfile = {
    description: null,
    exchange: null,
    industry: null,
    country: null,
    website: null,
    fiscalYearEnd: null,
    employeeCount: null,
    source: 'unavailable'
  };
  const emptyValuation = {
    sharesOutstanding: null,
    marketCap: null,
    enterpriseValue: null,
    trailingPe: null,
    evToRevenue: null,
    evToEbitda: null,
    source: 'unavailable'
  };
  return {
    company: companySection,
    quote: 100,
    position: null,
    priceHistory: [],
    benchmarkHistory: [],
    financials: [],
    filings: [],
    aiReports: [],
    coverage: null,
    journalPreview: [],
    recentAiReports: [],
    latestFilingSummary: null,
    keyMetrics: emptyKeyMetrics,
    companyProfile: emptyProfile,
    valuationSnapshot: emptyValuation,
    bullBear: {
      source: bullPoints.length > 0 ? 'memo_fallback' : 'unavailable',
      bull: bullPoints,
      bear: [],
      updatedAt: new Date().toISOString()
    },
    recentDevelopments: {
      status: 'unavailable',
      items: [],
      weeklySnapshot: null
    }
  };
}
if (process.env.RUN_TASK_WORKFLOW_E2E === '1') {
describe('task workflow hybrid migration e2e', () => {
// One-time suite setup: point DATABASE_URL at a throwaway sqlite file, reset
// the app's DB singletons, run migrations, seed the auth user, and only then
// import the app so its module initialization sees the prepared environment.
beforeAll(async () => {
  tempDir = mkdtempSync(join(tmpdir(), 'fiscal-task-e2e-'));
  const env = process.env as Record<string, string | undefined>;
  env.DATABASE_URL = `file:${join(tempDir, 'e2e.sqlite')}`;
  env.NODE_ENV = 'test';
  resetDbSingletons();
  sqliteClient = new Database(join(tempDir, 'e2e.sqlite'), { create: true });
  // Migrated tables rely on FK constraints; sqlite needs them switched on.
  sqliteClient.exec('PRAGMA foreign_keys = ON;');
  applySqlMigrations(sqliteClient);
  ensureTestUser(sqliteClient);
  const appModule = await import('./app');
  app = appModule.app;
});
// Suite teardown: close sqlite, drop the global singletons, remove temp files.
afterAll(() => {
  sqliteClient?.close();
  resetDbSingletons();
  if (tempDir) {
    rmSync(tempDir, { recursive: true, force: true });
  }
});
// Per-test reset: wipe projection tables, clear fake workflow state, and
// restore a healthy schema status so every test starts from the same baseline.
beforeEach(() => {
  if (!sqliteClient) {
    throw new Error('sqlite client not initialized');
  }
  clearProjectionTables(sqliteClient);
  runStatuses.clear();
  runCounter = 0;
  workflowBackendHealthy = true;
  setFinancialIngestionSchemaStatus({
    ok: true,
    mode: 'healthy'
  });
});
// Analyze jobs: three distinct accession numbers yield three queued tasks,
// while re-posting an accession that is already in flight returns the
// original task instead of creating a duplicate.
it('queues multiple analyze jobs and suppresses duplicate in-flight analyze jobs', async () => {
  const first = await jsonRequest('POST', '/api/filings/0000000000-26-000001/analyze');
  expect(first.response.status).toBe(200);
  const firstTaskId = (first.json as { task: { id: string } }).task.id;
  // Concurrent submissions for different filings must both succeed.
  const [second, third] = await Promise.all([
    jsonRequest('POST', '/api/filings/0000000000-26-000002/analyze'),
    jsonRequest('POST', '/api/filings/0000000000-26-000003/analyze')
  ]);
  expect(second.response.status).toBe(200);
  expect(third.response.status).toBe(200);
  // A duplicate submission is still a 200, but echoes the original task id.
  const duplicate = await jsonRequest('POST', '/api/filings/0000000000-26-000001/analyze');
  expect(duplicate.response.status).toBe(200);
  expect((duplicate.json as { task: { id: string } }).task.id).toBe(firstTaskId);
  const tasksResponse = await jsonRequest('GET', '/api/tasks?limit=10');
  expect(tasksResponse.response.status).toBe(200);
  const tasks = (tasksResponse.json as {
    tasks: Array<{
      id: string;
      status: string;
      stage: string;
      workflow_run_id?: string | null;
    }>;
  }).tasks;
  // Exactly three tasks exist, all queued and bound to a mock workflow run id.
  expect(tasks.length).toBe(3);
  expect(tasks.every((task) => task.status === 'queued')).toBe(true);
  expect(tasks.every((task) => task.stage === 'queued')).toBe(true);
  expect(tasks.every((task) => typeof task.workflow_run_id === 'string' && task.workflow_run_id.length > 0)).toBe(true);
});
// Watchlist creation normalizes ticker casing and tags (dedupe + drop blank
// entries) and must not auto-queue a sync_filings task.
it('persists watchlist category and tags without auto queueing a filing sync task', async () => {
  const created = await jsonRequest('POST', '/api/watchlist', {
    ticker: 'shop',
    companyName: 'Shopify Inc.',
    sector: 'Technology',
    category: 'core',
    // Duplicate and whitespace-only tags are expected to be dropped.
    tags: ['growth', 'ecommerce', 'growth', ' ']
  });
  expect(created.response.status).toBe(200);
  const createdBody = created.json as {
    item: {
      ticker: string;
      category: string | null;
      tags: string[];
    };
    autoFilingSyncQueued: boolean;
  };
  expect(createdBody.item.ticker).toBe('SHOP');
  expect(createdBody.item.category).toBe('core');
  expect(createdBody.item.tags).toEqual(['growth', 'ecommerce']);
  expect(createdBody.autoFilingSyncQueued).toBe(false);
  // No sync_filings task may appear as a side effect of the create.
  const tasksResponse = await jsonRequest('GET', '/api/tasks?limit=5');
  expect(tasksResponse.response.status).toBe(200);
  const syncTasks = (tasksResponse.json as {
    tasks: Array<{
      task_type: string;
      payload: {
        ticker?: string;
        category?: string;
        tags?: string[];
        limit?: number;
      };
    }>;
  }).tasks.filter((entry) => entry.task_type === 'sync_filings');
  expect(syncTasks).toHaveLength(0);
});
// Editing coverage metadata (category/tags) via PATCH must not trigger a
// filing sync task either.
it('does not queue a filing sync task when coverage metadata is edited', async () => {
  const created = await jsonRequest('POST', '/api/watchlist', {
    ticker: 'amd',
    companyName: 'Advanced Micro Devices, Inc.',
    sector: 'Technology',
    category: 'watch',
    tags: ['semis']
  });
  expect(created.response.status).toBe(200);
  const item = (created.json as {
    item: { id: number };
  }).item;
  const updated = await jsonRequest('PATCH', `/api/watchlist/${item.id}`, {
    category: 'core',
    tags: ['semis', 'ai']
  });
  expect(updated.response.status).toBe(200);
  const tasksResponse = await jsonRequest('GET', '/api/tasks?limit=5');
  expect(tasksResponse.response.status).toBe(200);
  const syncTasks = (tasksResponse.json as {
    tasks: Array<{ task_type: string }>;
  }).tasks.filter((entry) => entry.task_type === 'sync_filings');
  expect(syncTasks).toHaveLength(0);
});
// An explicit /api/filings/sync should carry the watchlist item's (already
// normalized) category and tags through into the task payload.
it('forwards watchlist metadata when filing sync is started explicitly', async () => {
  const created = await jsonRequest('POST', '/api/watchlist', {
    ticker: 'shop',
    companyName: 'Shopify Inc.',
    sector: 'Technology',
    category: 'core',
    tags: ['growth', 'ecommerce', 'growth', ' ']
  });
  expect(created.response.status).toBe(200);
  const createdBody = created.json as {
    item: {
      ticker: string;
      category: string | null;
      tags: string[];
    };
  };
  // Start a sync using the normalized values returned by the create call.
  const sync = await jsonRequest('POST', '/api/filings/sync', {
    ticker: createdBody.item.ticker,
    limit: 20,
    category: createdBody.item.category,
    tags: createdBody.item.tags
  });
  expect(sync.response.status).toBe(200);
  const task = (sync.json as {
    task: {
      task_type: string;
      payload: {
        ticker: string;
        limit: number;
        category?: string;
        tags?: string[];
      };
    };
  }).task;
  expect(task.task_type).toBe('sync_filings');
  expect(task.payload.ticker).toBe('SHOP');
  expect(task.payload.limit).toBe(20);
  expect(task.payload.category).toBe('core');
  expect(task.payload.tags).toEqual(['growth', 'ecommerce']);
});
// Manual sync accepts tags as a single comma-separated string; the API is
// expected to upper-case the ticker and split + dedupe the tag list.
it('accepts category and comma-separated tags on manual filings sync payload', async () => {
  const sync = await jsonRequest('POST', '/api/filings/sync', {
    ticker: 'nvda',
    limit: 15,
    category: 'watch',
    tags: 'semis, ai, semis'
  });
  expect(sync.response.status).toBe(200);
  const task = (sync.json as {
    task: {
      task_type: string;
      payload: {
        ticker: string;
        limit: number;
        category?: string;
        tags?: string[];
      };
    };
  }).task;
  expect(task.task_type).toBe('sync_filings');
  expect(task.payload.ticker).toBe('NVDA');
  expect(task.payload.limit).toBe(15);
  expect(task.payload.category).toBe('watch');
  expect(task.payload.tags).toEqual(['semis', 'ai']);
});
// Same-ticker sync requests (regardless of casing) must dedupe to one task.
it('reuses the same in-flight filing sync task for repeated same-ticker requests', async () => {
  const initialRequest = await jsonRequest('POST', '/api/filings/sync', {
    ticker: 'NVDA',
    limit: 20
  });
  const repeatRequest = await jsonRequest('POST', '/api/filings/sync', {
    ticker: 'nvda',
    limit: 20
  });
  expect(initialRequest.response.status).toBe(200);
  expect(repeatRequest.response.status).toBe(200);
  const initialTaskId = (initialRequest.json as { task: { id: string } }).task.id;
  const repeatTaskId = (repeatRequest.json as { task: { id: string } }).task.id;
  expect(repeatTaskId).toBe(initialTaskId);
  // Only a single in-flight sync task for NVDA may exist.
  const tasksResponse = await jsonRequest('GET', '/api/tasks?limit=10&status=queued&status=running');
  expect(tasksResponse.response.status).toBe(200);
  const inFlightSyncTasks = (tasksResponse.json as {
    tasks: Array<{ id: string; task_type: string; payload: { ticker?: string } }>;
  }).tasks.filter((entry) => entry.task_type === 'sync_filings' && entry.payload.ticker === 'NVDA');
  expect(inFlightSyncTasks).toHaveLength(1);
});
// /api/tickers/ensure dedupes ticker automation across request sources and
// ticker casing: both calls report queued=true with the same task id.
it('queues ticker automation only once per ticker via the explicit ensure endpoint', async () => {
  const upperCased = await jsonRequest('POST', '/api/tickers/ensure', {
    ticker: 'NVDA',
    source: 'search'
  });
  const lowerCased = await jsonRequest('POST', '/api/tickers/ensure', {
    ticker: 'nvda',
    source: 'analysis'
  });
  expect(upperCased.response.status).toBe(200);
  expect(lowerCased.response.status).toBe(200);
  type EnsureBody = { queued: boolean; task: { id: string } | null };
  const upperBody = upperCased.json as EnsureBody;
  const lowerBody = lowerCased.json as EnsureBody;
  expect(upperBody.queued).toBe(true);
  expect(lowerBody.queued).toBe(true);
  expect(lowerBody.task?.id).toBe(upperBody.task?.id);
});
// Distinct tickers must each get their own sync task (no cross-ticker dedupe).
it('lets different tickers queue independent filing sync tasks', async () => {
  const responses = [
    await jsonRequest('POST', '/api/filings/sync', { ticker: 'NVDA', limit: 20 }),
    await jsonRequest('POST', '/api/filings/sync', { ticker: 'MSFT', limit: 20 }),
    await jsonRequest('POST', '/api/filings/sync', { ticker: 'AAPL', limit: 20 })
  ];
  const taskIds = responses.map((entry) => (entry.json as { task: { id: string } }).task.id);
  expect(new Set(taskIds).size).toBe(3);
  const tasksResponse = await jsonRequest('GET', '/api/tasks?limit=10&status=queued&status=running');
  expect(tasksResponse.response.status).toBe(200);
  const queuedSyncTickers = (tasksResponse.json as {
    tasks: Array<{ task_type: string; payload: { ticker?: string } }>;
  }).tasks
    .filter((entry) => entry.task_type === 'sync_filings')
    .map((entry) => entry.payload.ticker)
    .filter((ticker): ticker is string => typeof ticker === 'string');
  expect(queuedSyncTickers.sort()).toEqual(['AAPL', 'MSFT', 'NVDA']);
});
// /api/filings honors an optional ticker filter; without it, results from all
// tickers are mixed together.
it('scopes the filings endpoint by ticker while leaving the global endpoint mixed', async () => {
  if (!sqliteClient) {
    throw new Error('sqlite client not initialized');
  }
  // Seed one filing per ticker so both scoped and global queries have data.
  seedFilingRecord(sqliteClient, {
    ticker: 'NVDA',
    accessionNumber: '0000000000-26-000110',
    filingType: '10-Q',
    filingDate: '2026-03-12',
    companyName: 'NVIDIA Corporation'
  });
  seedFilingRecord(sqliteClient, {
    ticker: 'MSFT',
    accessionNumber: '0000000000-26-000111',
    filingType: '10-K',
    filingDate: '2026-03-11',
    companyName: 'Microsoft Corporation'
  });
  const scoped = await jsonRequest('GET', '/api/filings?ticker=NVDA&limit=120');
  expect(scoped.response.status).toBe(200);
  const scopedFilings = (scoped.json as {
    filings: Array<{ ticker: string }>;
  }).filings;
  expect(scopedFilings.length).toBeGreaterThan(0);
  expect(scopedFilings.every((filing) => filing.ticker === 'NVDA')).toBe(true);
  const global = await jsonRequest('GET', '/api/filings?limit=120');
  expect(global.response.status).toBe(200);
  const globalTickers = new Set((global.json as {
    filings: Array<{ ticker: string }>;
  }).filings.map((filing) => filing.ticker));
  expect(globalTickers.has('NVDA')).toBe(true);
  expect(globalTickers.has('MSFT')).toBe(true);
});
// Coverage status transitions append status_change journal entries, and the
// latest status/priority are reflected in the watchlist listing.
it('updates coverage status and archives while appending status-change journal history', async () => {
  const created = await jsonRequest('POST', '/api/watchlist', {
    ticker: 'amd',
    companyName: 'Advanced Micro Devices, Inc.',
    sector: 'Technology',
    status: 'backlog',
    priority: 'medium',
    tags: ['semis']
  });
  expect(created.response.status).toBe(200);
  const createdItem = (created.json as {
    item: { id: number; ticker: string; status: string; priority: string };
  }).item;
  expect(createdItem.status).toBe('backlog');
  expect(createdItem.priority).toBe('medium');
  // backlog -> active: records a journal entry plus the review timestamp.
  const activated = await jsonRequest('PATCH', `/api/watchlist/${createdItem.id}`, {
    status: 'active',
    priority: 'high',
    lastReviewedAt: '2026-03-01T15:30:00.000Z'
  });
  expect(activated.response.status).toBe(200);
  const activatedBody = activated.json as {
    item: { status: string; priority: string; last_reviewed_at: string | null };
    statusChangeJournalCreated: boolean;
  };
  expect(activatedBody.item.status).toBe('active');
  expect(activatedBody.item.priority).toBe('high');
  expect(activatedBody.item.last_reviewed_at).toBe('2026-03-01T15:30:00.000Z');
  expect(activatedBody.statusChangeJournalCreated).toBe(true);
  // active -> archive: a second status change, hence a second journal entry.
  const archived = await jsonRequest('PATCH', `/api/watchlist/${createdItem.id}`, {
    status: 'archive'
  });
  expect(archived.response.status).toBe(200);
  expect((archived.json as {
    item: { status: string };
    statusChangeJournalCreated: boolean;
  }).item.status).toBe('archive');
  const journal = await jsonRequest('GET', '/api/research/journal?ticker=AMD');
  expect(journal.response.status).toBe(200);
  const entries = (journal.json as {
    entries: Array<{
      entry_type: string;
      title: string | null;
    }>;
  }).entries;
  // Two status_change entries; the archive transition is expected first
  // (newest-first ordering per the assertion on entries[0]).
  expect(entries.length).toBe(2);
  expect(entries.every((entry) => entry.entry_type === 'status_change')).toBe(true);
  expect(entries[0]?.title).toContain('Archive');
  const coverage = await jsonRequest('GET', '/api/watchlist');
  const saved = (coverage.json as {
    items: Array<{
      ticker: string;
      status: string;
      priority: string;
    }>;
  }).items.find((item) => item.ticker === 'AMD');
  expect(saved?.status).toBe('archive');
  expect(saved?.priority).toBe('high');
});
// Full analysis payload round-trip: seeds a filing (with metrics + AI
// analysis text), coverage, a holding, and a journal entry, then verifies
// that all of them surface in /api/analysis/company and that journal
// create/update/delete behaves as expected.
it('supports journal CRUD and includes coverage, preview, reports, and key metrics in analysis payload', async () => {
  if (!sqliteClient) {
    throw new Error('sqlite client not initialized');
  }
  seedFilingRecord(sqliteClient, {
    ticker: 'NFLX',
    accessionNumber: '0000000000-26-000777',
    filingType: '10-K',
    filingDate: '2026-03-10',
    companyName: 'Netflix, Inc.',
    metrics: {
      revenue: 41000000000,
      netIncome: 8600000000,
      totalAssets: 52000000000,
      cash: 7800000000,
      debt: 14000000000
    },
    analysisText: 'Subscriber growth reaccelerated with improved operating leverage.'
  });
  await jsonRequest('POST', '/api/watchlist', {
    ticker: 'nflx',
    companyName: 'Netflix, Inc.',
    sector: 'Communication Services',
    status: 'active',
    priority: 'high',
    tags: ['streaming', 'quality']
  });
  await jsonRequest('POST', '/api/portfolio/holdings', {
    ticker: 'NFLX',
    companyName: 'Netflix, Inc.',
    shares: 12,
    avgCost: 440,
    currentPrice: 455
  });
  const createdEntry = await jsonRequest('POST', '/api/research/journal', {
    ticker: 'NFLX',
    entryType: 'note',
    title: 'Thesis refresh',
    bodyMarkdown: 'Monitor ad-tier margin progression and content amortization.'
  });
  expect(createdEntry.response.status).toBe(200);
  const entryId = (createdEntry.json as {
    entry: { id: number };
  }).entry.id;
  // The analysis payload must aggregate everything seeded above.
  const analysis = await jsonRequest('GET', '/api/analysis/company?ticker=NFLX');
  expect(analysis.response.status).toBe(200);
  const payload = (analysis.json as {
    analysis: {
      coverage: { status: string; priority: string; tags: string[] } | null;
      journalPreview: Array<{ title: string | null; body_markdown: string }>;
      recentAiReports: Array<{ accessionNumber: string; summary: string }>;
      latestFilingSummary: { accessionNumber: string; summary: string | null } | null;
      keyMetrics: { revenue: number | null; netMargin: number | null };
      position: { company_name: string | null } | null;
      companyProfile: { source: string; description: string | null };
      valuationSnapshot: { source: string; marketCap: number | null; evToRevenue: number | null };
      bullBear: { source: string; bull: string[]; bear: string[] };
      recentDevelopments: {
        status: string;
        items: Array<{ kind: string; accessionNumber: string | null }>;
        weeklySnapshot: { source: string; itemCount: number } | null;
      };
    };
  }).analysis;
  expect(payload.coverage?.status).toBe('active');
  expect(payload.coverage?.priority).toBe('high');
  expect(payload.coverage?.tags).toEqual(['streaming', 'quality']);
  expect(payload.journalPreview.length).toBe(1);
  expect(payload.journalPreview[0]?.title).toBe('Thesis refresh');
  expect(payload.recentAiReports.length).toBe(1);
  expect(payload.latestFilingSummary?.accessionNumber).toBe('0000000000-26-000777');
  expect(payload.latestFilingSummary?.summary).toContain('Subscriber growth reaccelerated');
  expect(payload.keyMetrics.revenue).toBe(41000000000);
  expect(payload.keyMetrics.netMargin).not.toBeNull();
  expect(payload.position?.company_name).toBe('Netflix, Inc.');
  // Derived sections may legitimately vary by environment; accept any of the
  // documented source values rather than pinning one.
  expect(['sec_derived', 'unavailable']).toContain(payload.companyProfile.source);
  expect(['derived', 'partial', 'unavailable']).toContain(payload.valuationSnapshot.source);
  expect(['ai_synthesized', 'memo_fallback', 'unavailable']).toContain(payload.bullBear.source);
  expect(['ready', 'partial', 'unavailable']).toContain(payload.recentDevelopments.status);
  expect(payload.recentDevelopments.items[0]?.accessionNumber).toBe('0000000000-26-000777');
  expect(payload.recentDevelopments.weeklySnapshot?.itemCount ?? 0).toBeGreaterThanOrEqual(1);
  // Journal update, then delete, then verify the list is empty again.
  const updatedEntry = await jsonRequest('PATCH', `/api/research/journal/${entryId}`, {
    title: 'Thesis refresh v2',
    bodyMarkdown: 'Monitor ad-tier margin progression, churn, and cash content spend.'
  });
  expect(updatedEntry.response.status).toBe(200);
  expect((updatedEntry.json as {
    entry: { title: string | null; body_markdown: string };
  }).entry.title).toBe('Thesis refresh v2');
  const journalAfterUpdate = await jsonRequest('GET', '/api/research/journal?ticker=NFLX');
  expect(journalAfterUpdate.response.status).toBe(200);
  expect((journalAfterUpdate.json as {
    entries: Array<{ title: string | null; body_markdown: string }>;
  }).entries[0]?.body_markdown).toContain('cash content spend');
  const removed = await jsonRequest('DELETE', `/api/research/journal/${entryId}`);
  expect(removed.response.status).toBe(200);
  const journalAfterDelete = await jsonRequest('GET', '/api/research/journal?ticker=NFLX');
  expect((journalAfterDelete.json as {
    entries: unknown[];
  }).entries).toHaveLength(0);
});
// Cache behavior: a pre-seeded company_overview_cache row whose source
// signature matches the live inputs is served as-is; refresh=true bypasses
// the cache and rebuilds from live data.
it('serves cached analysis until refresh is requested', async () => {
  if (!sqliteClient) {
    throw new Error('sqlite client not initialized');
  }
  seedFilingRecord(sqliteClient, {
    ticker: 'CACH',
    accessionNumber: '0000000000-26-000901',
    filingType: '10-K',
    filingDate: '2026-02-20',
    companyName: 'Live Corp'
  });
  // Read back the stored timestamps: the signature below must be computed
  // from the exact row values, or the cache would be considered stale.
  const filingRow = sqliteClient.query(`
SELECT created_at, updated_at
FROM filing
WHERE ticker = 'CACH'
ORDER BY id DESC
LIMIT 1
`).get() as { created_at: string; updated_at: string } | null;
  if (!filingRow) {
    throw new Error('cached filing row not found');
  }
  const { __companyAnalysisInternals } = await import('../company-analysis');
  const sourceSignature = __companyAnalysisInternals.buildCompanyAnalysisSourceSignature({
    ticker: 'CACH',
    localInputs: {
      filings: [{
        id: 1,
        ticker: 'CACH',
        filing_type: '10-K',
        filing_date: '2026-02-20',
        accession_number: '0000000000-26-000901',
        cik: '0000000000',
        company_name: 'Live Corp',
        filing_url: 'https://www.sec.gov/Archives/0000000000-26-000901.htm',
        submission_url: 'https://www.sec.gov/submissions/0000000000-26-000901.json',
        primary_document: '0000000000-26-000901.htm',
        metrics: null,
        analysis: null,
        created_at: filingRow.created_at,
        updated_at: filingRow.updated_at
      }],
      holding: null,
      watchlistItem: null,
      journalPreview: [],
      memo: null
    }
  });
  const now = new Date().toISOString();
  // Seed a cache entry whose payload names the company "Cached Corp" so we
  // can tell a cache hit ("Cached Corp") from a live rebuild ("Live Corp").
  sqliteClient.query(`
INSERT INTO company_overview_cache (
user_id,
ticker,
cache_version,
source_signature,
payload,
created_at,
updated_at
) VALUES (?, ?, ?, ?, ?, ?, ?)
`).run(
    TEST_USER_ID,
    'CACH',
    1,
    sourceSignature,
    JSON.stringify(buildCachedAnalysisPayload({
      ticker: 'CACH',
      companyName: 'Cached Corp'
    })),
    now,
    now
  );
  const cached = await jsonRequest('GET', '/api/analysis/company?ticker=CACH');
  expect(cached.response.status).toBe(200);
  expect((cached.json as {
    analysis: { company: { companyName: string } };
  }).analysis.company.companyName).toBe('Cached Corp');
  const refreshed = await jsonRequest('GET', '/api/analysis/company?ticker=CACH&refresh=true');
  expect(refreshed.response.status).toBe(200);
  expect((refreshed.json as {
    analysis: { company: { companyName: string } };
  }).analysis.company.companyName).toBe('Live Corp');
});
// Editing the research memo must change the analysis source signature, so a
// subsequent read rebuilds the payload instead of serving the stale cache.
it('invalidates cached analysis when the memo changes', async () => {
  if (!sqliteClient) {
    throw new Error('sqlite client not initialized');
  }
  seedFilingRecord(sqliteClient, {
    ticker: 'MEMO',
    accessionNumber: '0000000000-26-000902',
    filingType: '10-K',
    filingDate: '2026-02-20',
    companyName: 'Memo Corp'
  });
  // Seed a memo directly; its thesis text should flow into bullBear.bull.
  sqliteClient.query(`
INSERT INTO research_memo (
user_id,
organization_id,
ticker,
rating,
conviction,
time_horizon_months,
packet_title,
packet_subtitle,
thesis_markdown,
variant_view_markdown,
catalysts_markdown,
risks_markdown,
disconfirming_evidence_markdown,
next_actions_markdown,
created_at,
updated_at
) VALUES (?, NULL, ?, 'buy', 'high', 24, NULL, NULL, ?, '', '', '', '', '', ?, ?)
`).run(
    TEST_USER_ID,
    'MEMO',
    'Legacy thesis still holds.',
    '2026-03-13T00:00:00.000Z',
    '2026-03-13T00:00:00.000Z'
  );
  // First read caches a payload derived from the legacy memo text.
  const first = await jsonRequest('GET', '/api/analysis/company?ticker=MEMO');
  expect(first.response.status).toBe(200);
  expect((first.json as {
    analysis: { bullBear: { bull: string[] } };
  }).analysis.bullBear.bull.join(' ')).toContain('Legacy thesis');
  // Mutate the memo (and bump updated_at) behind the cache's back.
  sqliteClient.query(`
UPDATE research_memo
SET thesis_markdown = ?, updated_at = ?
WHERE user_id = ? AND ticker = ?
`).run(
    'Updated thesis drives the next refresh.',
    '2026-03-13T01:00:00.000Z',
    TEST_USER_ID,
    'MEMO'
  );
  // Second read must notice the signature change and rebuild.
  const second = await jsonRequest('GET', '/api/analysis/company?ticker=MEMO');
  expect(second.response.status).toBe(200);
  expect((second.json as {
    analysis: { bullBear: { bull: string[] } };
  }).analysis.bullBear.bull.join(' ')).toContain('Updated thesis');
});
// Holdings may be created without a company name and enriched later via PATCH.
it('persists nullable holding company names and allows later enrichment', async () => {
  const createResponse = await jsonRequest('POST', '/api/portfolio/holdings', {
    ticker: 'ORCL',
    shares: 5,
    avgCost: 100,
    currentPrice: 110
  });
  expect(createResponse.response.status).toBe(200);
  const listResponse = await jsonRequest('GET', '/api/portfolio/holdings');
  expect(listResponse.response.status).toBe(200);
  const oracleHolding = (listResponse.json as {
    holdings: Array<{
      id: number;
      ticker: string;
      company_name: string | null;
    }>;
  }).holdings.find((holding) => holding.ticker === 'ORCL');
  expect(oracleHolding?.company_name).toBeNull();
  const patchResponse = await jsonRequest('PATCH', `/api/portfolio/holdings/${oracleHolding?.id}`, {
    companyName: 'Oracle Corporation'
  });
  expect(patchResponse.response.status).toBe(200);
  expect((patchResponse.json as {
    holding: { company_name: string | null };
  }).holding.company_name).toBe('Oracle Corporation');
});
// Notification flags: read and silenced are independently settable and
// clearable via PATCH; timestamps are set when true and nulled when false.
it('updates notification read and silenced state via patch endpoint', async () => {
  const created = await jsonRequest('POST', '/api/filings/0000000000-26-000010/analyze');
  const taskId = (created.json as { task: { id: string } }).task.id;
  // read=true sets the read timestamp without touching silenced.
  const readUpdate = await jsonRequest('PATCH', `/api/tasks/${taskId}/notification`, { read: true });
  expect(readUpdate.response.status).toBe(200);
  const readTask = (readUpdate.json as {
    task: {
      notification_read_at: string | null;
      notification_silenced_at: string | null;
    };
  }).task;
  expect(readTask.notification_read_at).toBeTruthy();
  expect(readTask.notification_silenced_at).toBeNull();
  // silenced=true sets its timestamp while the earlier read state persists.
  const silencedUpdate = await jsonRequest('PATCH', `/api/tasks/${taskId}/notification`, {
    silenced: true
  });
  expect(silencedUpdate.response.status).toBe(200);
  const silencedTask = (silencedUpdate.json as {
    task: {
      notification_read_at: string | null;
      notification_silenced_at: string | null;
    };
  }).task;
  expect(silencedTask.notification_read_at).toBeTruthy();
  expect(silencedTask.notification_silenced_at).toBeTruthy();
  // Both flags can be cleared in a single request.
  const resetUpdate = await jsonRequest('PATCH', `/api/tasks/${taskId}/notification`, {
    read: false,
    silenced: false
  });
  expect(resetUpdate.response.status).toBe(200);
  const resetTask = (resetUpdate.json as {
    task: {
      notification_read_at: string | null;
      notification_silenced_at: string | null;
    };
  }).task;
  expect(resetTask.notification_read_at).toBeNull();
  expect(resetTask.notification_silenced_at).toBeNull();
});
// Stage context: a running task with JSON stage_context (written directly to
// the DB, simulating worker progress) must surface parsed context and a
// notification payload on both the task list and the timeline endpoints.
it('returns enriched stage context and notification payloads for tasks and timelines', async () => {
  if (!sqliteClient) {
    throw new Error('sqlite client not initialized');
  }
  const created = await jsonRequest('POST', '/api/filings/0000000000-26-000010/analyze');
  const taskId = (created.json as { task: { id: string } }).task.id;
  const now = new Date().toISOString();
  // stage_context is stored as a JSON string; the API is expected to parse it.
  const stageContext = JSON.stringify({
    progress: {
      current: 2,
      total: 5,
      unit: 'steps'
    },
    subject: {
      accessionNumber: '0000000000-26-000010'
    }
  });
  // Move the task into a mid-flight running state, detached from the mocked
  // workflow run (workflow_run_id = NULL).
  sqliteClient.query(`
UPDATE task_run
SET status = ?, stage = ?, stage_detail = ?, stage_context = ?, workflow_run_id = NULL, updated_at = ?
WHERE id = ?;
`).run(
    'running',
    'analyze.extract',
    'Generating extraction context from filing text',
    stageContext,
    now,
    taskId
  );
  // Mirror the same stage into the timeline table.
  sqliteClient.query(`
INSERT INTO task_stage_event (task_id, user_id, stage, stage_detail, stage_context, status, created_at)
VALUES (?, ?, ?, ?, ?, ?, ?);
`).run(
    taskId,
    TEST_USER_ID,
    'analyze.extract',
    'Generating extraction context from filing text',
    stageContext,
    'running',
    now
  );
  const tasksResponse = await jsonRequest('GET', '/api/tasks?limit=5');
  expect(tasksResponse.response.status).toBe(200);
  const apiTask = (tasksResponse.json as {
    tasks: Array<{
      id: string;
      stage_context: { progress?: { current: number } | null } | null;
      notification: { title: string; actions: Array<{ id: string }> };
    }>;
  }).tasks.find((entry) => entry.id === taskId);
  expect(apiTask?.stage_context?.progress?.current).toBe(2);
  expect(apiTask?.notification.title).toBe('Filing analysis');
  expect(apiTask?.notification.actions.some((action) => action.id === 'open_filings')).toBe(true);
  const timeline = await jsonRequest('GET', `/api/tasks/${taskId}/timeline`);
  expect(timeline.response.status).toBe(200);
  const event = (timeline.json as {
    events: Array<{
      stage: string;
      stage_context: { progress?: { total: number } | null } | null;
    }>;
  }).events.find((entry) => entry.stage === 'analyze.extract');
  expect(event?.stage_context?.progress?.total).toBe(5);
});
// Verifies the notification payload produced for analyze tasks in their two
// terminal states, with the task rows driven directly into those states via SQL:
//  - completed: first action deep-links to the generated analysis report
//  - failed:    status/detail lines describe the failing stage, and an
//               'open_filings' action is offered
it('returns task-specific notification actions for completed and failed analyze tasks', async () => {
  if (!sqliteClient) {
    throw new Error('sqlite client not initialized');
  }
  const db = sqliteClient;

  // --- Completed task: create via API, then force the row into 'completed'.
  const completedCreate = await jsonRequest('POST', '/api/filings/0000000000-26-000020/analyze');
  const completedTaskId = (completedCreate.json as { task: { id: string } }).task.id;
  // stage_context / result payloads; key order matters for JSON.stringify output.
  const completedStageContext = JSON.stringify({
    subject: {
      ticker: 'AAPL',
      accessionNumber: '0000000000-26-000020',
      label: '10-Q'
    }
  });
  const completedResult = JSON.stringify({
    ticker: 'AAPL',
    accessionNumber: '0000000000-26-000020',
    filingType: '10-Q',
    provider: 'test',
    model: 'fixture',
    extractionProvider: 'test',
    extractionModel: 'fixture',
    searchTaskId: null
  });
  db.query(`
    UPDATE task_run
    SET status = ?, stage = ?, stage_detail = ?, stage_context = ?, result = ?, workflow_run_id = NULL, updated_at = ?, finished_at = ?
    WHERE id = ?;
  `).run(
    'completed',
    'completed',
    'Analysis report generated for AAPL 10-Q 0000000000-26-000020.',
    completedStageContext,
    completedResult,
    '2026-03-09T15:00:00.000Z',
    '2026-03-09T15:00:00.000Z',
    completedTaskId
  );

  // The completed task's first notification action should open the report.
  const completed = await jsonRequest('GET', `/api/tasks/${completedTaskId}`);
  expect(completed.response.status).toBe(200);
  type NotificationAction = { id: string; href: string | null };
  const completedActions = (completed.json as {
    task: { notification: { actions: NotificationAction[] } };
  }).task.notification.actions;
  expect(completedActions[0]?.id).toBe('open_analysis_report');
  expect(completedActions[0]?.href).toContain('/analysis/reports/AAPL/0000000000-26-000020');

  // --- Failed task: same approach, forcing a mid-pipeline failure state.
  const failedCreate = await jsonRequest('POST', '/api/filings/0000000000-26-000021/analyze');
  const failedTaskId = (failedCreate.json as { task: { id: string } }).task.id;
  const failedStageContext = JSON.stringify({
    subject: {
      ticker: 'AAPL',
      accessionNumber: '0000000000-26-000021'
    }
  });
  db.query(`
    UPDATE task_run
    SET status = ?, stage = ?, stage_detail = ?, stage_context = ?, error = ?, workflow_run_id = NULL, updated_at = ?, finished_at = ?
    WHERE id = ?;
  `).run(
    'failed',
    'analyze.fetch_document',
    'Could not load the primary filing document.',
    failedStageContext,
    'Could not load the primary filing document for AAPL · 0000000000-26-000021. Retry the job after confirming the SEC source is reachable.',
    '2026-03-09T15:01:00.000Z',
    '2026-03-09T15:01:00.000Z',
    failedTaskId
  );

  // Failed tasks surface the failing stage plus the short detail line,
  // and still offer a way back to the filings view.
  const failed = await jsonRequest('GET', `/api/tasks/${failedTaskId}`);
  expect(failed.response.status).toBe(200);
  const failedTask = (failed.json as {
    task: {
      notification: {
        statusLine: string;
        detailLine: string | null;
        actions: NotificationAction[];
      };
    };
  }).task;
  expect(failedTask.notification.statusLine).toBe('Failed during fetch primary document');
  expect(failedTask.notification.detailLine).toBe('Could not load the primary filing document.');
  expect(failedTask.notification.actions.some((action) => action.id === 'open_filings')).toBe(true);
});
// End-to-end reconciliation check: the task projection should mirror the mocked
// workflow run's status transitions (running -> completed) and record one
// timeline event per observed state, while /api/health should flip to 503 /
// "degraded" when either the ingestion schema drifts or the workflow backend
// stops responding.
it('reconciles workflow run status into projection state and degrades health when workflow backend is down', async () => {
  // Shape asserted against the /api/health payload below; the cast is
  // type-only and erased at runtime, so reusing one alias changes nothing.
  type HealthBody = {
    status: string;
    workflow: { ok: boolean; reason?: string };
    database: {
      ingestionSchema: {
        ok: boolean;
        mode: string;
        missingIndexes: string[];
        duplicateGroups: number;
      };
    };
  };

  const created = await jsonRequest('POST', '/api/filings/0000000000-26-000100/analyze');
  const task = (created.json as { task: { id: string; workflow_run_id: string } }).task;

  // Flip the mocked workflow run to 'running' and confirm the projection follows.
  runStatuses.set(task.workflow_run_id, 'running');
  const running = await jsonRequest('GET', `/api/tasks/${task.id}`);
  expect(running.response.status).toBe(200);
  const runningView = (running.json as { task: { status: string; stage: string } }).task;
  expect(runningView.status).toBe('running');
  expect(runningView.stage).toBe('running');

  // Flip to 'completed'; reconciliation should also stamp finished_at.
  runStatuses.set(task.workflow_run_id, 'completed');
  const completed = await jsonRequest('GET', `/api/tasks/${task.id}`);
  expect(completed.response.status).toBe(200);
  const completedView = (completed.json as {
    task: { status: string; stage: string; finished_at: string | null };
  }).task;
  expect(completedView.status).toBe('completed');
  expect(completedView.stage).toBe('completed');
  expect(completedView.finished_at).toBeTruthy();

  // Every observed transition (queued/running/completed) yields a timeline event.
  const timeline = await jsonRequest('GET', `/api/tasks/${task.id}/timeline`);
  expect(timeline.response.status).toBe(200);
  const events = (timeline.json as { events: Array<{ stage: string; status: string }> }).events;
  expect(events.length).toBeGreaterThanOrEqual(3);
  for (const expectedStatus of ['queued', 'running', 'completed']) {
    expect(events.some((event) => event.status === expectedStatus)).toBe(true);
  }

  // Baseline: everything healthy -> HTTP 200 / status 'ok'.
  const healthy = await jsonRequest('GET', '/api/health');
  expect(healthy.response.status).toBe(200);
  const healthyBody = healthy.json as HealthBody;
  expect(healthyBody.status).toBe('ok');
  expect(healthyBody.workflow.ok).toBe(true);
  expect(healthyBody.database.ingestionSchema.ok).toBe(true);
  expect(healthyBody.database.ingestionSchema.mode).toBe('healthy');

  // Simulate ingestion-schema drift: health degrades and surfaces the drift details.
  setFinancialIngestionSchemaStatus({
    ok: false,
    mode: 'drifted',
    missingIndexes: ['company_financial_bundle_uidx'],
    duplicateGroups: 1
  });
  const schemaDrifted = await jsonRequest('GET', '/api/health');
  expect(schemaDrifted.response.status).toBe(503);
  const driftedBody = schemaDrifted.json as HealthBody;
  expect(driftedBody.status).toBe('degraded');
  expect(driftedBody.workflow.ok).toBe(true);
  expect(driftedBody.database.ingestionSchema.ok).toBe(false);
  expect(driftedBody.database.ingestionSchema.mode).toBe('drifted');
  expect(driftedBody.database.ingestionSchema.missingIndexes).toEqual(['company_financial_bundle_uidx']);
  expect(driftedBody.database.ingestionSchema.duplicateGroups).toBe(1);

  // Restore the schema, then take down the workflow backend instead:
  // health must again report 503 / degraded, now blaming the workflow side.
  setFinancialIngestionSchemaStatus({
    ok: true,
    mode: 'healthy'
  });
  workflowBackendHealthy = false;
  const degraded = await jsonRequest('GET', '/api/health');
  expect(degraded.response.status).toBe(503);
  const degradedBody = degraded.json as HealthBody;
  expect(degradedBody.status).toBe('degraded');
  expect(degradedBody.workflow.ok).toBe(false);
  // NOTE(review): workflowBackendHealthy is left false on exit — presumably a
  // beforeEach hook elsewhere in this file resets it; confirm to rule out
  // cross-test bleed.
});
});
}