Add hybrid research copilot workspace
This commit is contained in:
@@ -21,6 +21,7 @@ import { requireAuthenticatedSession } from '@/lib/server/auth-session';
|
||||
import { getLatestFinancialIngestionSchemaStatus } from '@/lib/server/db/financial-ingestion-schema';
|
||||
import { asErrorMessage, jsonError } from '@/lib/server/http';
|
||||
import { buildPortfolioSummary } from '@/lib/server/portfolio';
|
||||
import { runResearchCopilotTurn } from '@/lib/server/research-copilot';
|
||||
import {
|
||||
defaultFinancialSyncLimit,
|
||||
getCompanyFinancials
|
||||
@@ -61,6 +62,7 @@ import {
|
||||
listResearchJournalEntries,
|
||||
updateResearchJournalEntryRecord
|
||||
} from '@/lib/server/repos/research-journal';
|
||||
import { getResearchCopilotSessionByTicker } from '@/lib/server/repos/research-copilot';
|
||||
import {
|
||||
deleteWatchlistItemRecord,
|
||||
getWatchlistItemById,
|
||||
@@ -839,6 +841,116 @@ export const app = new Elysia({ prefix: '/api' })
|
||||
ticker: t.String({ minLength: 1 })
|
||||
})
|
||||
})
|
||||
// GET /api/research/copilot/session — return the caller's copilot session for one ticker.
// Responds with { session: null } payload shape when no session exists for the ticker.
.get('/research/copilot/session', async ({ query }) => {
  const { session, response } = await requireAuthenticatedSession();
  if (response) return response;

  // Normalize the ticker the same way the repo layer stores it (trimmed, upper-case).
  const rawTicker = query.ticker;
  const ticker = typeof rawTicker === 'string' ? rawTicker.trim().toUpperCase() : '';
  if (!ticker) return jsonError('ticker is required');

  const copilotSession = await getResearchCopilotSessionByTicker(session.user.id, ticker);
  return Response.json({ session: copilotSession });
}, {
  query: t.Object({
    ticker: t.String({ minLength: 1 })
  })
})
|
||||
// POST /api/research/copilot/turn — run one chat turn against the copilot service.
// Validation errors return jsonError; service failures are mapped to a friendly message.
.post('/research/copilot/turn', async ({ body }) => {
  const { session, response } = await requireAuthenticatedSession();
  if (response) return response;

  const payload = asRecord(body);
  const ticker = typeof payload.ticker === 'string' ? payload.ticker.trim().toUpperCase() : '';
  const query = typeof payload.query === 'string' ? payload.query.trim() : '';
  const memoSection = asResearchMemoSection(payload.memoSection);

  if (!ticker) return jsonError('ticker is required');
  if (!query) return jsonError('query is required');

  // Coerce pinned ids to positive integers; any other entry is dropped.
  let pinnedArtifactIds: number[] | undefined;
  if (Array.isArray(payload.pinnedArtifactIds)) {
    pinnedArtifactIds = payload.pinnedArtifactIds
      .map((entry) => Number(entry))
      .filter((entry) => Number.isInteger(entry) && entry > 0);
  }

  try {
    const result = await runResearchCopilotTurn({
      userId: session.user.id,
      ticker,
      query,
      selectedSources: asSearchSources(payload.sources),
      pinnedArtifactIds,
      memoSection
    });

    return Response.json(result);
  } catch (error) {
    return jsonError(asErrorMessage(error, 'Unable to run research copilot turn'));
  }
}, {
  body: t.Object({
    ticker: t.String({ minLength: 1 }),
    query: t.String({ minLength: 1 }),
    sources: t.Optional(t.Union([t.String(), t.Array(t.String())])),
    pinnedArtifactIds: t.Optional(t.Array(t.Numeric())),
    memoSection: t.Optional(t.String())
  })
})
|
||||
// POST /api/research/copilot/job — queue a background research-brief task.
// Dedupes on a (ticker, lower-cased query) resource key: an in-flight task is returned as-is.
.post('/research/copilot/job', async ({ body }) => {
  const { session, response } = await requireAuthenticatedSession();
  if (response) return response;

  const payload = asRecord(body);
  const ticker = typeof payload.ticker === 'string' ? payload.ticker.trim().toUpperCase() : '';
  const query = typeof payload.query === 'string' ? payload.query.trim() : '';

  if (!ticker) return jsonError('ticker is required');
  if (!query) return jsonError('query is required');

  try {
    // Case-insensitive dedupe key: at most one in-flight brief per (ticker, query).
    const resourceKey = `research_brief:${ticker}:${query.toLowerCase()}`;

    const existing = await findInFlightTask(session.user.id, 'research_brief', resourceKey);
    if (existing) {
      return Response.json({ task: existing });
    }

    const task = await enqueueTask({
      userId: session.user.id,
      taskType: 'research_brief',
      payload: {
        ticker,
        query,
        sources: asSearchSources(payload.sources) ?? SEARCH_SOURCES
      },
      priority: 55,
      resourceKey
    });

    return Response.json({ task });
  } catch (error) {
    return jsonError(asErrorMessage(error, 'Unable to queue research brief'));
  }
}, {
  body: t.Object({
    ticker: t.String({ minLength: 1 }),
    query: t.String({ minLength: 1 }),
    sources: t.Optional(t.Union([t.String(), t.Array(t.String())]))
  })
})
|
||||
.get('/research/library', async ({ query }) => {
|
||||
const { session, response } = await requireAuthenticatedSession();
|
||||
if (response) {
|
||||
|
||||
219
lib/server/api/research-copilot.e2e.test.ts
Normal file
219
lib/server/api/research-copilot.e2e.test.ts
Normal file
@@ -0,0 +1,219 @@
|
||||
import { beforeAll, describe, expect, it, mock } from 'bun:test';
|
||||
|
||||
// Fixed user id used by every mock and assertion in this file.
const TEST_USER_ID = 'copilot-api-user';

// Mocked session lookup: a pre-existing NVDA session with no messages.
const mockGetSession = mock(async () => ({
  id: 1,
  user_id: TEST_USER_ID,
  ticker: 'NVDA',
  title: 'NVDA copilot',
  selected_sources: ['documents', 'filings', 'research'],
  pinned_artifact_ids: [],
  created_at: '2026-03-14T00:00:00.000Z',
  updated_at: '2026-03-14T00:00:00.000Z',
  messages: []
}));

// Mocked copilot turn: returns the updated session plus the user/assistant message pair,
// with one filing citation on the assistant message (artifactId 5 is asserted below).
const mockRunTurn = mock(async () => ({
  session: {
    id: 1,
    user_id: TEST_USER_ID,
    ticker: 'NVDA',
    title: 'NVDA copilot',
    selected_sources: ['filings'],
    pinned_artifact_ids: [4],
    created_at: '2026-03-14T00:00:00.000Z',
    updated_at: '2026-03-14T00:00:01.000Z',
    messages: []
  },
  user_message: {
    id: 1,
    session_id: 1,
    user_id: TEST_USER_ID,
    role: 'user',
    content_markdown: 'What changed?',
    citations: [],
    follow_ups: [],
    suggested_actions: [],
    selected_sources: ['filings'],
    pinned_artifact_ids: [4],
    memo_section: 'thesis',
    created_at: '2026-03-14T00:00:00.000Z'
  },
  assistant_message: {
    id: 2,
    session_id: 1,
    user_id: TEST_USER_ID,
    role: 'assistant',
    content_markdown: 'Demand stayed strong [1].',
    citations: [{
      index: 1,
      label: 'NVDA · 0001 [1]',
      chunkId: 1,
      href: '/analysis/reports/NVDA/0001',
      source: 'filings',
      sourceKind: 'filing_brief',
      sourceRef: '0001',
      title: '10-K brief',
      ticker: 'NVDA',
      accessionNumber: '0001',
      filingDate: '2026-02-18',
      excerpt: 'Demand stayed strong.',
      artifactId: 5
    }],
    follow_ups: ['What changed in risks?'],
    suggested_actions: [],
    selected_sources: ['filings'],
    pinned_artifact_ids: [4],
    memo_section: 'thesis',
    created_at: '2026-03-14T00:00:01.000Z'
  },
  results: []
}));

// Mocked brief generator (imported alongside runResearchCopilotTurn by the app module).
const mockGenerateBrief = mock(async () => ({
  provider: 'test',
  model: 'test-model',
  bodyMarkdown: '# NVDA brief\n\nDemand held up.',
  evidence: []
}));

// No task is in flight, so the job route always enqueues.
const mockFindInFlightTask = mock(async () => null);
// Mocked queued task record; payload.ticker is asserted to be the normalized 'NVDA'.
const mockEnqueueTask = mock(async () => ({
  id: 'task-1',
  user_id: TEST_USER_ID,
  task_type: 'research_brief',
  status: 'queued',
  stage: 'queued',
  stage_detail: 'Queued',
  stage_context: null,
  resource_key: 'research_brief:NVDA:update the thesis',
  notification_read_at: null,
  notification_silenced_at: null,
  priority: 55,
  payload: {
    ticker: 'NVDA',
    query: 'Update the thesis',
    sources: ['filings']
  },
  result: null,
  error: null,
  attempts: 0,
  max_attempts: 3,
  workflow_run_id: 'run-1',
  created_at: '2026-03-14T00:00:00.000Z',
  updated_at: '2026-03-14T00:00:00.000Z',
  finished_at: null
}));
|
||||
|
||||
// Replace the app module's server dependencies with the mocks above.
// Must run before './app' is imported so module resolution picks up the stubs.
function registerMocks() {
  // Stub auth so every route sees the same signed-in user and no redirect response.
  mock.module('@/lib/server/auth-session', () => ({
    requireAuthenticatedSession: async () => ({
      session: {
        user: {
          id: TEST_USER_ID,
          email: 'copilot@example.com',
          name: 'Copilot API User',
          image: null
        }
      },
      response: null
    })
  }));

  // Session lookup used by GET /research/copilot/session.
  mock.module('@/lib/server/repos/research-copilot', () => ({
    getResearchCopilotSessionByTicker: mockGetSession
  }));

  // Copilot service used by POST /research/copilot/turn (and the brief generator).
  mock.module('@/lib/server/research-copilot', () => ({
    runResearchCopilotTurn: mockRunTurn,
    generateResearchBrief: mockGenerateBrief
  }));

  // Task queue used by POST /research/copilot/job; unrelated task helpers are inert stubs.
  mock.module('@/lib/server/tasks', () => ({
    enqueueTask: mockEnqueueTask,
    findInFlightTask: mockFindInFlightTask,
    getTaskById: mock(async () => null),
    getTaskQueueSnapshot: mock(async () => ({ items: [], stats: { queued: 0, running: 0, failed: 0 } })),
    getTaskTimeline: mock(async () => []),
    listRecentTasks: mock(async () => []),
    updateTaskNotification: mock(async () => null)
  }));
}
|
||||
|
||||
// End-to-end tests for the three copilot routes, driven through app.handle()
// with all server dependencies mocked (no database, no network).
describe('research copilot api', () => {
  let app: { handle: (request: Request) => Promise<Response> };

  beforeAll(async () => {
    mock.restore();
    registerMocks();
    // Import after mocks are registered so the app binds to the stubbed modules.
    ({ app } = await import('./app'));
  });

  it('returns the ticker-scoped session payload', async () => {
    // Lower-case query ticker must be normalized to upper-case before the repo call.
    const response = await app.handle(new Request('http://localhost/api/research/copilot/session?ticker=nvda'));
    expect(response.status).toBe(200);
    const payload = await response.json() as { session: { ticker: string } };
    expect(payload.session.ticker).toBe('NVDA');
    expect(mockGetSession).toHaveBeenCalled();
  });

  it('returns turn responses with assistant citations', async () => {
    const response = await app.handle(new Request('http://localhost/api/research/copilot/turn', {
      method: 'POST',
      headers: {
        'content-type': 'application/json'
      },
      body: JSON.stringify({
        ticker: 'nvda',
        query: 'What changed?',
        sources: ['filings'],
        pinnedArtifactIds: [4],
        memoSection: 'thesis'
      })
    }));

    expect(response.status).toBe(200);
    const payload = await response.json() as {
      assistant_message: {
        citations: Array<{ artifactId: number | null }>;
      };
    };

    // artifactId 5 comes from the mocked turn result's single citation.
    expect(payload.assistant_message.citations[0]?.artifactId).toBe(5);
    expect(mockRunTurn).toHaveBeenCalled();
  });

  it('queues research brief jobs with normalized ticker payloads', async () => {
    const response = await app.handle(new Request('http://localhost/api/research/copilot/job', {
      method: 'POST',
      headers: {
        'content-type': 'application/json'
      },
      body: JSON.stringify({
        ticker: 'nvda',
        query: 'Update the thesis',
        sources: ['filings']
      })
    }));

    expect(response.status).toBe(200);
    const payload = await response.json() as {
      task: {
        task_type: string;
        payload: {
          ticker: string;
        };
      };
    };

    expect(payload.task.task_type).toBe('research_brief');
    expect(payload.task.payload.ticker).toBe('NVDA');
    // The dedupe key upper-cases the ticker and lower-cases the query text.
    expect(mockFindInFlightTask).toHaveBeenCalledWith(
      TEST_USER_ID,
      'research_brief',
      'research_brief:NVDA:update the thesis'
    );
    expect(mockEnqueueTask).toHaveBeenCalled();
  });
});
|
||||
@@ -77,7 +77,6 @@ function loadSqliteExtensions(client: Database) {
|
||||
function isVectorExtensionLoaded(client: Database) {
|
||||
return vectorExtensionStatus.get(client) ?? false;
|
||||
}
|
||||
|
||||
function ensureSearchVirtualTables(client: Database) {
|
||||
client.exec(`
|
||||
CREATE VIRTUAL TABLE IF NOT EXISTS \`search_chunk_fts\` USING fts5(
|
||||
|
||||
@@ -44,10 +44,12 @@ type ResearchMemoSection =
|
||||
| 'risks'
|
||||
| 'disconfirming_evidence'
|
||||
| 'next_actions';
|
||||
type SearchSource = 'documents' | 'filings' | 'research';
|
||||
type FinancialCadence = 'annual' | 'quarterly' | 'ltm';
|
||||
type SearchDocumentScope = 'global' | 'user';
|
||||
type SearchDocumentSourceKind = 'filing_document' | 'filing_brief' | 'research_note';
|
||||
type SearchIndexStatus = 'pending' | 'indexed' | 'failed';
|
||||
type ResearchCopilotMessageRole = 'user' | 'assistant';
|
||||
type FinancialSurfaceKind =
|
||||
| 'income_statement'
|
||||
| 'balance_sheet'
|
||||
@@ -636,7 +638,7 @@ export const filingLink = sqliteTable('filing_link', {
|
||||
export const taskRun = sqliteTable('task_run', {
|
||||
id: text('id').primaryKey().notNull(),
|
||||
user_id: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }),
|
||||
task_type: text('task_type').$type<'sync_filings' | 'refresh_prices' | 'analyze_filing' | 'portfolio_insights' | 'index_search'>().notNull(),
|
||||
task_type: text('task_type').$type<'sync_filings' | 'refresh_prices' | 'analyze_filing' | 'portfolio_insights' | 'index_search' | 'research_brief'>().notNull(),
|
||||
status: text('status').$type<'queued' | 'running' | 'completed' | 'failed'>().notNull(),
|
||||
stage: text('stage').notNull(),
|
||||
stage_detail: text('stage_detail'),
|
||||
@@ -824,6 +826,38 @@ export const researchMemoEvidence = sqliteTable('research_memo_evidence', {
|
||||
researchMemoEvidenceUnique: uniqueIndex('research_memo_evidence_unique_uidx').on(table.memo_id, table.artifact_id, table.section)
|
||||
}));
|
||||
|
||||
// One copilot chat session per (user, ticker); messages live in research_copilot_message.
export const researchCopilotSession = sqliteTable('research_copilot_session', {
  id: integer('id').primaryKey({ autoIncrement: true }),
  user_id: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }),
  ticker: text('ticker').notNull(),
  title: text('title'),
  // JSON-encoded arrays; $type makes reads come back as the declared shapes.
  selected_sources: text('selected_sources', { mode: 'json' }).$type<SearchSource[]>().notNull(),
  pinned_artifact_ids: text('pinned_artifact_ids', { mode: 'json' }).$type<number[]>().notNull(),
  // ISO-8601 timestamp strings.
  created_at: text('created_at').notNull(),
  updated_at: text('updated_at').notNull()
}, (table) => ({
  // Enforces at most one session per user/ticker pair.
  researchCopilotSessionTickerUnique: uniqueIndex('research_copilot_session_ticker_uidx').on(table.user_id, table.ticker),
  // Supports recency-ordered session listings per user.
  researchCopilotSessionUpdatedIndex: index('research_copilot_session_updated_idx').on(table.user_id, table.updated_at)
}));
|
||||
|
||||
// Individual chat messages belonging to a copilot session; cascade-deleted with
// their session or owning user.
export const researchCopilotMessage = sqliteTable('research_copilot_message', {
  id: integer('id').primaryKey({ autoIncrement: true }),
  session_id: integer('session_id').notNull().references(() => researchCopilotSession.id, { onDelete: 'cascade' }),
  user_id: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }),
  role: text('role').$type<ResearchCopilotMessageRole>().notNull(),
  content_markdown: text('content_markdown').notNull(),
  // Nullable JSON columns: absent on user messages, populated on assistant replies.
  citations: text('citations', { mode: 'json' }).$type<Record<string, unknown>[] | null>(),
  follow_ups: text('follow_ups', { mode: 'json' }).$type<string[] | null>(),
  suggested_actions: text('suggested_actions', { mode: 'json' }).$type<Record<string, unknown>[] | null>(),
  // Snapshot of the session controls that were active when the message was sent.
  selected_sources: text('selected_sources', { mode: 'json' }).$type<SearchSource[] | null>(),
  pinned_artifact_ids: text('pinned_artifact_ids', { mode: 'json' }).$type<number[] | null>(),
  memo_section: text('memo_section').$type<ResearchMemoSection | null>(),
  created_at: text('created_at').notNull()
}, (table) => ({
  // Chronological reads within a session.
  researchCopilotMessageSessionIndex: index('research_copilot_message_session_idx').on(table.session_id, table.created_at),
  // Chronological reads across all of a user's messages.
  researchCopilotMessageUserIndex: index('research_copilot_message_user_idx').on(table.user_id, table.created_at)
}));
|
||||
|
||||
export const authSchema = {
|
||||
user,
|
||||
session,
|
||||
@@ -855,7 +889,9 @@ export const appSchema = {
|
||||
searchChunk,
|
||||
researchArtifact,
|
||||
researchMemo,
|
||||
researchMemoEvidence
|
||||
researchMemoEvidence,
|
||||
researchCopilotSession,
|
||||
researchCopilotMessage
|
||||
};
|
||||
|
||||
export const schema = {
|
||||
|
||||
@@ -296,6 +296,50 @@ function ensureResearchWorkspaceSchema(client: Database) {
|
||||
`);
|
||||
}
|
||||
|
||||
// Compat-layer migration: create the copilot tables and indexes on databases that
// predate the drizzle migration. Idempotent — guarded by hasTable plus IF NOT EXISTS.
function ensureResearchCopilotSchema(client: Database) {
  if (!hasTable(client, 'research_copilot_session')) {
    client.exec(`
      CREATE TABLE IF NOT EXISTS \`research_copilot_session\` (
        \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
        \`user_id\` text NOT NULL,
        \`ticker\` text NOT NULL,
        \`title\` text,
        \`selected_sources\` text NOT NULL DEFAULT '["documents","filings","research"]',
        \`pinned_artifact_ids\` text NOT NULL DEFAULT '[]',
        \`created_at\` text NOT NULL,
        \`updated_at\` text NOT NULL,
        FOREIGN KEY (\`user_id\`) REFERENCES \`user\`(\`id\`) ON UPDATE no action ON DELETE cascade
      );
    `);
  }

  if (!hasTable(client, 'research_copilot_message')) {
    client.exec(`
      CREATE TABLE IF NOT EXISTS \`research_copilot_message\` (
        \`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
        \`session_id\` integer NOT NULL,
        \`user_id\` text NOT NULL,
        \`role\` text NOT NULL,
        \`content_markdown\` text NOT NULL,
        \`citations\` text,
        \`follow_ups\` text,
        \`suggested_actions\` text,
        \`selected_sources\` text,
        \`pinned_artifact_ids\` text,
        \`memo_section\` text,
        \`created_at\` text NOT NULL,
        FOREIGN KEY (\`session_id\`) REFERENCES \`research_copilot_session\`(\`id\`) ON UPDATE no action ON DELETE cascade,
        FOREIGN KEY (\`user_id\`) REFERENCES \`user\`(\`id\`) ON UPDATE no action ON DELETE cascade
      );
    `);
  }

  // Indexes mirror the drizzle schema definitions; safe to re-run.
  client.exec('CREATE UNIQUE INDEX IF NOT EXISTS `research_copilot_session_ticker_uidx` ON `research_copilot_session` (`user_id`, `ticker`);');
  client.exec('CREATE INDEX IF NOT EXISTS `research_copilot_session_updated_idx` ON `research_copilot_session` (`user_id`, `updated_at`);');
  client.exec('CREATE INDEX IF NOT EXISTS `research_copilot_message_session_idx` ON `research_copilot_message` (`session_id`, `created_at`);');
  client.exec('CREATE INDEX IF NOT EXISTS `research_copilot_message_user_idx` ON `research_copilot_message` (`user_id`, `created_at`);');
}
|
||||
|
||||
const TAXONOMY_SNAPSHOT_REQUIRED_COLUMNS = [
|
||||
'parser_engine',
|
||||
'parser_version',
|
||||
@@ -548,6 +592,7 @@ WHERE resource_key IS NOT NULL AND status IN ('queued', 'running');`);
|
||||
}
|
||||
|
||||
ensureResearchWorkspaceSchema(client);
|
||||
ensureResearchCopilotSchema(client);
|
||||
}
|
||||
|
||||
export const __sqliteSchemaCompatInternals = {
|
||||
|
||||
165
lib/server/repos/research-copilot.test.ts
Normal file
165
lib/server/repos/research-copilot.test.ts
Normal file
@@ -0,0 +1,165 @@
|
||||
import {
|
||||
afterAll,
|
||||
beforeAll,
|
||||
beforeEach,
|
||||
describe,
|
||||
expect,
|
||||
it
|
||||
} from 'bun:test';
|
||||
import { mock } from 'bun:test';
|
||||
import { mkdtempSync, readFileSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { Database } from 'bun:sqlite';
|
||||
|
||||
const TEST_USER_ID = 'copilot-user';
|
||||
|
||||
let tempDir: string | null = null;
|
||||
let sqliteClient: Database | null = null;
|
||||
let copilotRepo: typeof import('./research-copilot') | null = null;
|
||||
|
||||
async function loadRepoModule() {
|
||||
const moduleUrl = new URL(`./research-copilot.ts?test=${Date.now()}`, import.meta.url).href;
|
||||
return await import(moduleUrl) as typeof import('./research-copilot');
|
||||
}
|
||||
|
||||
function resetDbSingletons() {
|
||||
const globalState = globalThis as typeof globalThis & {
|
||||
__fiscalSqliteClient?: Database;
|
||||
__fiscalDrizzleDb?: unknown;
|
||||
};
|
||||
|
||||
globalState.__fiscalSqliteClient?.close();
|
||||
globalState.__fiscalSqliteClient = undefined;
|
||||
globalState.__fiscalDrizzleDb = undefined;
|
||||
}
|
||||
|
||||
function applyMigration(client: Database, fileName: string) {
|
||||
const sql = readFileSync(join(process.cwd(), 'drizzle', fileName), 'utf8');
|
||||
client.exec(sql);
|
||||
}
|
||||
|
||||
function ensureUser(client: Database) {
|
||||
const now = Date.now();
|
||||
client.exec(`
|
||||
INSERT OR REPLACE INTO user (id, name, email, emailVerified, image, createdAt, updatedAt, role, banned, banReason, banExpires)
|
||||
VALUES ('${TEST_USER_ID}', 'Copilot User', 'copilot@example.com', 1, NULL, ${now}, ${now}, NULL, 0, NULL, NULL);
|
||||
`);
|
||||
}
|
||||
|
||||
// Integration tests for the copilot repo against a throwaway on-disk sqlite database
// built from the real migration files (no mocked persistence).
describe('research copilot repo', () => {
  beforeAll(async () => {
    mock.restore();
    tempDir = mkdtempSync(join(tmpdir(), 'fiscal-copilot-repo-'));
    process.env.DATABASE_URL = `file:${join(tempDir, 'repo.sqlite')}`;
    (process.env as Record<string, string | undefined>).NODE_ENV = 'test';

    resetDbSingletons();
    sqliteClient = new Database(join(tempDir, 'repo.sqlite'), { create: true });
    sqliteClient.exec('PRAGMA foreign_keys = ON;');
    // Base schema, research workspace tables, then the copilot tables under test.
    applyMigration(sqliteClient, '0000_cold_silver_centurion.sql');
    applyMigration(sqliteClient, '0008_research_workspace.sql');
    applyMigration(sqliteClient, '0013_research_copilot.sql');
    ensureUser(sqliteClient);

    // Hand the test client to the app's db singleton slot so the repo module uses it.
    const globalState = globalThis as typeof globalThis & {
      __fiscalSqliteClient?: Database;
      __fiscalDrizzleDb?: unknown;
    };
    globalState.__fiscalSqliteClient = sqliteClient;
    globalState.__fiscalDrizzleDb = undefined;

    copilotRepo = await loadRepoModule();
  });

  afterAll(() => {
    mock.restore();
    sqliteClient?.close();
    resetDbSingletons();
    if (tempDir) {
      rmSync(tempDir, { recursive: true, force: true });
    }
  });

  // Wipe copilot rows between tests; delete messages first to satisfy the FK.
  beforeEach(() => {
    sqliteClient?.exec('DELETE FROM research_copilot_message;');
    sqliteClient?.exec('DELETE FROM research_copilot_session;');
  });

  it('creates and reloads ticker-scoped sessions', async () => {
    if (!copilotRepo) {
      throw new Error('repo not initialized');
    }

    // Lower-case ticker and duplicate pinned ids exercise normalization on create.
    const session = await copilotRepo.getOrCreateResearchCopilotSession({
      userId: TEST_USER_ID,
      ticker: 'msft',
      selectedSources: ['documents', 'research'],
      pinnedArtifactIds: [2, 2, 5]
    });

    const loaded = await copilotRepo.getResearchCopilotSessionByTicker(TEST_USER_ID, 'MSFT');

    expect(session.ticker).toBe('MSFT');
    expect(session.selected_sources).toEqual(['documents', 'research']);
    expect(session.pinned_artifact_ids).toEqual([2, 5]);
    expect(loaded?.id).toBe(session.id);
  });

  it('appends messages and updates session state', async () => {
    if (!copilotRepo) {
      throw new Error('repo not initialized');
    }

    const session = await copilotRepo.getOrCreateResearchCopilotSession({
      userId: TEST_USER_ID,
      ticker: 'NVDA'
    });

    await copilotRepo.appendResearchCopilotMessage({
      userId: TEST_USER_ID,
      sessionId: session.id,
      role: 'user',
      contentMarkdown: 'What changed in the latest filing?',
      selectedSources: ['filings'],
      pinnedArtifactIds: [7],
      memoSection: 'thesis'
    });

    await copilotRepo.appendResearchCopilotMessage({
      userId: TEST_USER_ID,
      sessionId: session.id,
      role: 'assistant',
      contentMarkdown: 'Demand remained strong [1]',
      citations: [{
        index: 1,
        label: 'NVDA 10-K [1]',
        chunkId: 1,
        href: '/filings?ticker=NVDA',
        source: 'filings',
        sourceKind: 'filing_brief',
        sourceRef: '0001',
        title: '10-K brief',
        ticker: 'NVDA',
        accessionNumber: '0001',
        filingDate: '2026-01-01',
        excerpt: 'Demand remained strong.',
        artifactId: 3
      }]
    });

    const updated = await copilotRepo.upsertResearchCopilotSessionState({
      userId: TEST_USER_ID,
      ticker: 'NVDA',
      title: 'NVDA demand update',
      selectedSources: ['filings'],
      pinnedArtifactIds: [7]
    });

    expect(updated.title).toBe('NVDA demand update');
    expect(updated.messages).toHaveLength(2);
    expect(updated.messages[0]?.selected_sources).toEqual(['filings']);
    expect(updated.messages[0]?.memo_section).toBe('thesis');
    expect(updated.messages[1]?.citations[0]?.artifactId).toBe(3);
  });
});
|
||||
229
lib/server/repos/research-copilot.ts
Normal file
229
lib/server/repos/research-copilot.ts
Normal file
@@ -0,0 +1,229 @@
|
||||
import { and, asc, eq } from 'drizzle-orm';
|
||||
import type {
|
||||
ResearchCopilotCitation,
|
||||
ResearchCopilotMessage,
|
||||
ResearchCopilotSession,
|
||||
ResearchCopilotSuggestedAction,
|
||||
ResearchMemoSection,
|
||||
SearchSource
|
||||
} from '@/lib/types';
|
||||
import { db } from '@/lib/server/db';
|
||||
import {
|
||||
researchCopilotMessage,
|
||||
researchCopilotSession
|
||||
} from '@/lib/server/db/schema';
|
||||
|
||||
type ResearchCopilotSessionRow = typeof researchCopilotSession.$inferSelect;
|
||||
type ResearchCopilotMessageRow = typeof researchCopilotMessage.$inferSelect;
|
||||
|
||||
const DEFAULT_SELECTED_SOURCES: SearchSource[] = ['documents', 'filings', 'research'];
|
||||
|
||||
function normalizeTicker(ticker: string) {
|
||||
return ticker.trim().toUpperCase();
|
||||
}
|
||||
|
||||
function normalizeSources(value?: SearchSource[] | null) {
|
||||
const unique = new Set<SearchSource>();
|
||||
|
||||
for (const source of value ?? DEFAULT_SELECTED_SOURCES) {
|
||||
if (source === 'documents' || source === 'filings' || source === 'research') {
|
||||
unique.add(source);
|
||||
}
|
||||
}
|
||||
|
||||
return unique.size > 0 ? [...unique] : [...DEFAULT_SELECTED_SOURCES];
|
||||
}
|
||||
|
||||
function normalizePinnedArtifactIds(value?: number[] | null) {
|
||||
const unique = new Set<number>();
|
||||
|
||||
for (const id of value ?? []) {
|
||||
const normalized = Math.trunc(Number(id));
|
||||
if (Number.isInteger(normalized) && normalized > 0) {
|
||||
unique.add(normalized);
|
||||
}
|
||||
}
|
||||
|
||||
return [...unique];
|
||||
}
|
||||
|
||||
function normalizeOptionalString(value?: string | null) {
|
||||
const normalized = value?.trim();
|
||||
return normalized ? normalized : null;
|
||||
}
|
||||
|
||||
function toCitationArray(value: unknown): ResearchCopilotCitation[] {
|
||||
return Array.isArray(value) ? value as ResearchCopilotCitation[] : [];
|
||||
}
|
||||
|
||||
function toActionArray(value: unknown): ResearchCopilotSuggestedAction[] {
|
||||
return Array.isArray(value) ? value as ResearchCopilotSuggestedAction[] : [];
|
||||
}
|
||||
|
||||
function toFollowUps(value: unknown) {
|
||||
return Array.isArray(value)
|
||||
? value.filter((entry): entry is string => typeof entry === 'string' && entry.trim().length > 0)
|
||||
: [];
|
||||
}
|
||||
|
||||
function toMessage(row: ResearchCopilotMessageRow): ResearchCopilotMessage {
|
||||
return {
|
||||
id: row.id,
|
||||
session_id: row.session_id,
|
||||
user_id: row.user_id,
|
||||
role: row.role,
|
||||
content_markdown: row.content_markdown,
|
||||
citations: toCitationArray(row.citations),
|
||||
follow_ups: toFollowUps(row.follow_ups),
|
||||
suggested_actions: toActionArray(row.suggested_actions),
|
||||
selected_sources: normalizeSources(row.selected_sources),
|
||||
pinned_artifact_ids: normalizePinnedArtifactIds(row.pinned_artifact_ids),
|
||||
memo_section: row.memo_section ?? null,
|
||||
created_at: row.created_at
|
||||
};
|
||||
}
|
||||
|
||||
function toSession(row: ResearchCopilotSessionRow, messages: ResearchCopilotMessage[]): ResearchCopilotSession {
|
||||
return {
|
||||
id: row.id,
|
||||
user_id: row.user_id,
|
||||
ticker: row.ticker,
|
||||
title: row.title ?? null,
|
||||
selected_sources: normalizeSources(row.selected_sources),
|
||||
pinned_artifact_ids: normalizePinnedArtifactIds(row.pinned_artifact_ids),
|
||||
created_at: row.created_at,
|
||||
updated_at: row.updated_at,
|
||||
messages
|
||||
};
|
||||
}
|
||||
|
||||
async function listMessagesForSession(sessionId: number) {
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(researchCopilotMessage)
|
||||
.where(eq(researchCopilotMessage.session_id, sessionId))
|
||||
.orderBy(asc(researchCopilotMessage.created_at), asc(researchCopilotMessage.id));
|
||||
|
||||
return rows.map(toMessage);
|
||||
}
|
||||
|
||||
async function getSessionRowByTicker(userId: string, ticker: string) {
|
||||
const [row] = await db
|
||||
.select()
|
||||
.from(researchCopilotSession)
|
||||
.where(and(
|
||||
eq(researchCopilotSession.user_id, userId),
|
||||
eq(researchCopilotSession.ticker, normalizeTicker(ticker))
|
||||
))
|
||||
.limit(1);
|
||||
|
||||
return row ?? null;
|
||||
}
|
||||
|
||||
export async function getResearchCopilotSessionByTicker(userId: string, ticker: string) {
|
||||
const row = await getSessionRowByTicker(userId, ticker);
|
||||
if (!row) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return toSession(row, await listMessagesForSession(row.id));
|
||||
}
|
||||
|
||||
export async function getOrCreateResearchCopilotSession(input: {
|
||||
userId: string;
|
||||
ticker: string;
|
||||
title?: string | null;
|
||||
selectedSources?: SearchSource[] | null;
|
||||
pinnedArtifactIds?: number[] | null;
|
||||
}) {
|
||||
const normalizedTicker = normalizeTicker(input.ticker);
|
||||
if (!normalizedTicker) {
|
||||
throw new Error('ticker is required');
|
||||
}
|
||||
|
||||
const existing = await getSessionRowByTicker(input.userId, normalizedTicker);
|
||||
if (existing) {
|
||||
const messages = await listMessagesForSession(existing.id);
|
||||
return toSession(existing, messages);
|
||||
}
|
||||
|
||||
const now = new Date().toISOString();
|
||||
const [created] = await db
|
||||
.insert(researchCopilotSession)
|
||||
.values({
|
||||
user_id: input.userId,
|
||||
ticker: normalizedTicker,
|
||||
title: normalizeOptionalString(input.title),
|
||||
selected_sources: normalizeSources(input.selectedSources),
|
||||
pinned_artifact_ids: normalizePinnedArtifactIds(input.pinnedArtifactIds),
|
||||
created_at: now,
|
||||
updated_at: now
|
||||
})
|
||||
.returning();
|
||||
|
||||
return toSession(created, []);
|
||||
}
|
||||
|
||||
export async function upsertResearchCopilotSessionState(input: {
|
||||
userId: string;
|
||||
ticker: string;
|
||||
title?: string | null;
|
||||
selectedSources?: SearchSource[] | null;
|
||||
pinnedArtifactIds?: number[] | null;
|
||||
}) {
|
||||
const session = await getOrCreateResearchCopilotSession(input);
|
||||
const [updated] = await db
|
||||
.update(researchCopilotSession)
|
||||
.set({
|
||||
title: input.title === undefined ? session.title : normalizeOptionalString(input.title),
|
||||
selected_sources: input.selectedSources === undefined
|
||||
? session.selected_sources
|
||||
: normalizeSources(input.selectedSources),
|
||||
pinned_artifact_ids: input.pinnedArtifactIds === undefined
|
||||
? session.pinned_artifact_ids
|
||||
: normalizePinnedArtifactIds(input.pinnedArtifactIds),
|
||||
updated_at: new Date().toISOString()
|
||||
})
|
||||
.where(eq(researchCopilotSession.id, session.id))
|
||||
.returning();
|
||||
|
||||
return toSession(updated, await listMessagesForSession(updated.id));
|
||||
}
|
||||
|
||||
export async function appendResearchCopilotMessage(input: {
|
||||
userId: string;
|
||||
sessionId: number;
|
||||
role: ResearchCopilotMessage['role'];
|
||||
contentMarkdown: string;
|
||||
citations?: ResearchCopilotCitation[] | null;
|
||||
followUps?: string[] | null;
|
||||
suggestedActions?: ResearchCopilotSuggestedAction[] | null;
|
||||
selectedSources?: SearchSource[] | null;
|
||||
pinnedArtifactIds?: number[] | null;
|
||||
memoSection?: ResearchMemoSection | null;
|
||||
}) {
|
||||
const now = new Date().toISOString();
|
||||
const [created] = await db
|
||||
.insert(researchCopilotMessage)
|
||||
.values({
|
||||
session_id: input.sessionId,
|
||||
user_id: input.userId,
|
||||
role: input.role,
|
||||
content_markdown: input.contentMarkdown.trim(),
|
||||
citations: input.citations ?? [],
|
||||
follow_ups: input.followUps ?? [],
|
||||
suggested_actions: input.suggestedActions ?? [],
|
||||
selected_sources: input.selectedSources ? normalizeSources(input.selectedSources) : null,
|
||||
pinned_artifact_ids: input.pinnedArtifactIds ? normalizePinnedArtifactIds(input.pinnedArtifactIds) : null,
|
||||
memo_section: input.memoSection ?? null,
|
||||
created_at: now
|
||||
})
|
||||
.returning();
|
||||
|
||||
await db
|
||||
.update(researchCopilotSession)
|
||||
.set({ updated_at: now })
|
||||
.where(eq(researchCopilotSession.id, input.sessionId));
|
||||
|
||||
return toMessage(created);
|
||||
}
|
||||
@@ -25,6 +25,7 @@ import {
|
||||
researchMemo,
|
||||
researchMemoEvidence
|
||||
} from '@/lib/server/db/schema';
|
||||
import { getResearchCopilotSessionByTicker } from '@/lib/server/repos/research-copilot';
|
||||
import { getFilingByAccession, listFilingsRecords } from '@/lib/server/repos/filings';
|
||||
import { getWatchlistItemByTicker } from '@/lib/server/repos/watchlist';
|
||||
|
||||
@@ -374,6 +375,26 @@ async function getArtifactByIdForUser(id: number, userId: string) {
|
||||
return row ?? null;
|
||||
}
|
||||
|
||||
export async function getResearchArtifactsByIdsForUser(userId: string, ids: number[]) {
|
||||
const normalizedIds = [...new Set(ids.map((id) => Math.trunc(id)).filter((id) => Number.isInteger(id) && id > 0))];
|
||||
if (normalizedIds.length === 0) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(researchArtifact)
|
||||
.where(and(
|
||||
eq(researchArtifact.user_id, userId),
|
||||
sql`${researchArtifact.id} in (${sql.join(normalizedIds.map((id) => sql`${id}`), sql`, `)})`
|
||||
));
|
||||
|
||||
const order = new Map(normalizedIds.map((id, index) => [id, index]));
|
||||
return rows
|
||||
.sort((left, right) => (order.get(left.id) ?? Number.MAX_SAFE_INTEGER) - (order.get(right.id) ?? Number.MAX_SAFE_INTEGER))
|
||||
.map((row) => toResearchArtifact(row));
|
||||
}
|
||||
|
||||
async function getMemoByIdForUser(id: number, userId: string) {
|
||||
const [row] = await db
|
||||
.select()
|
||||
@@ -902,12 +923,13 @@ export async function getResearchPacket(userId: string, ticker: string): Promise
|
||||
|
||||
export async function getResearchWorkspace(userId: string, ticker: string): Promise<ResearchWorkspace> {
|
||||
const normalizedTicker = normalizeTicker(ticker);
|
||||
const [coverage, memo, library, packet, latestFiling] = await Promise.all([
|
||||
const [coverage, memo, library, packet, latestFiling, copilotSession] = await Promise.all([
|
||||
getWatchlistItemByTicker(userId, normalizedTicker),
|
||||
getResearchMemoByTicker(userId, normalizedTicker),
|
||||
listResearchArtifacts(userId, { ticker: normalizedTicker, limit: 40 }),
|
||||
getResearchPacket(userId, normalizedTicker),
|
||||
listFilingsRecords({ ticker: normalizedTicker, limit: 1 })
|
||||
listFilingsRecords({ ticker: normalizedTicker, limit: 1 }),
|
||||
getResearchCopilotSessionByTicker(userId, normalizedTicker)
|
||||
]);
|
||||
|
||||
return {
|
||||
@@ -918,7 +940,8 @@ export async function getResearchWorkspace(userId: string, ticker: string): Prom
|
||||
memo,
|
||||
library: library.artifacts,
|
||||
packet,
|
||||
availableTags: library.availableTags
|
||||
availableTags: library.availableTags,
|
||||
copilotSession
|
||||
};
|
||||
}
|
||||
|
||||
@@ -1119,4 +1142,3 @@ export async function getResearchArtifactFileResponse(userId: string, id: number
|
||||
export function rebuildResearchArtifactIndex() {
|
||||
rebuildArtifactSearchIndex();
|
||||
}
|
||||
|
||||
|
||||
69
lib/server/research-copilot-format.test.ts
Normal file
69
lib/server/research-copilot-format.test.ts
Normal file
@@ -0,0 +1,69 @@
|
||||
import { describe, expect, it } from 'bun:test';
|
||||
import type { SearchResult } from '@/lib/types';
|
||||
import {
|
||||
extractJsonObject,
|
||||
parseCopilotResponse
|
||||
} from '@/lib/server/research-copilot-format';
|
||||
|
||||
/**
 * Builds a fully-populated SearchResult fixture for these tests; pass
 * `overrides` to change only the fields a test cares about. Defaults model a
 * filing-brief hit for NVDA accession 0001.
 */
function result(overrides: Partial<SearchResult> = {}): SearchResult {
  return {
    chunkId: 1,
    documentId: 1,
    source: 'filings',
    sourceKind: 'filing_brief',
    sourceRef: '0001',
    title: '10-K brief',
    ticker: 'NVDA',
    accessionNumber: '0001',
    filingDate: '2026-02-18',
    citationLabel: 'NVDA · 0001 [1]',
    headingPath: null,
    chunkText: 'Demand stayed strong and margins expanded.',
    snippet: 'Demand stayed strong and margins expanded.',
    score: 0.9,
    vectorRank: 1,
    lexicalRank: 1,
    href: '/analysis/reports/NVDA/0001',
    ...overrides
  };
}
|
||||
|
||||
describe('research copilot format helpers', () => {
  // Happy path: the model returned the strict-JSON envelope; parsed citations,
  // follow-ups, and the normalized suggested action must all survive.
  it('parses strict json responses with suggested actions', () => {
    const parsed = parseCopilotResponse(JSON.stringify({
      answerMarkdown: 'Demand stayed strong [1]. The setup still looks constructive [2].',
      followUps: ['What disconfirms the bull case?', 'Which risks changed most?'],
      suggestedActions: [{
        type: 'draft_memo_section',
        label: 'Use as thesis draft',
        section: 'thesis',
        contentMarkdown: 'Maintain a constructive stance while monitoring concentration.',
        citationIndexes: [1, 2]
      }]
    }), [result(), result({ chunkId: 2, citationLabel: 'NVDA · 0002 [2]', sourceRef: '0002' })], 'What changed?', 'thesis');

    expect(parsed.citationIndexes).toEqual([1, 2]);
    expect(parsed.followUps).toHaveLength(2);
    expect(parsed.suggestedActions[0]?.type).toBe('draft_memo_section');
    expect(parsed.suggestedActions[0]?.section).toBe('thesis');
  });

  // Degraded path: non-JSON model output should be passed through as the
  // answer, with default citations and the two built-in fallback actions.
  it('falls back to plain text and default actions when json parsing fails', () => {
    const parsed = parseCopilotResponse(
      'Plain text answer without json wrapper',
      [result(), result({ chunkId: 2, citationLabel: 'NVDA · 0002 [2]', sourceRef: '0002' })],
      'Summarize the setup',
      null
    );

    expect(parsed.answerMarkdown).toContain('Plain text answer');
    expect(parsed.citationIndexes).toEqual([1, 2]);
    expect(parsed.suggestedActions.some((action) => action.type === 'draft_note')).toBe(true);
    expect(parsed.suggestedActions.some((action) => action.type === 'queue_research_brief')).toBe(true);
  });

  // Fenced ```json blocks take priority and are unwrapped verbatim.
  it('extracts the first json object from fenced responses', () => {
    const extracted = extractJsonObject('```json\n{"answerMarkdown":"A [1]","followUps":[],"suggestedActions":[]}\n```');
    expect(extracted).toBe('{"answerMarkdown":"A [1]","followUps":[],"suggestedActions":[]}');
  });
});
|
||||
225
lib/server/research-copilot-format.ts
Normal file
225
lib/server/research-copilot-format.ts
Normal file
@@ -0,0 +1,225 @@
|
||||
import { randomUUID } from 'node:crypto';
|
||||
import type {
|
||||
ResearchCopilotSuggestedAction,
|
||||
ResearchMemoSection,
|
||||
SearchResult
|
||||
} from '@/lib/types';
|
||||
|
||||
type ParsedCopilotPayload = {
|
||||
answerMarkdown: string;
|
||||
followUps: string[];
|
||||
suggestedActions: ResearchCopilotSuggestedAction[];
|
||||
citationIndexes: number[];
|
||||
};
|
||||
|
||||
const MAX_FOLLOW_UPS = 4;
|
||||
const MAX_SUGGESTED_ACTIONS = 3;
|
||||
|
||||
function truncate(value: string, maxLength: number) {
|
||||
const normalized = value.trim();
|
||||
if (normalized.length <= maxLength) {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
return `${normalized.slice(0, maxLength - 1).trimEnd()}…`;
|
||||
}
|
||||
|
||||
function buildSessionTitle(query: string) {
|
||||
return truncate(query, 72);
|
||||
}
|
||||
|
||||
export function extractJsonObject(text: string) {
|
||||
const fenced = text.match(/```json\s*([\s\S]*?)```/i)?.[1];
|
||||
if (fenced) {
|
||||
return fenced.trim();
|
||||
}
|
||||
|
||||
const start = text.indexOf('{');
|
||||
const end = text.lastIndexOf('}');
|
||||
if (start >= 0 && end > start) {
|
||||
return text.slice(start, end + 1).trim();
|
||||
}
|
||||
|
||||
return null;
|
||||
}
|
||||
|
||||
export function parseCitationIndexes(value: string, evidenceLength: number) {
|
||||
const matches = [...value.matchAll(/\[(\d+)\]/g)];
|
||||
const seen = new Set<number>();
|
||||
const indexes: number[] = [];
|
||||
|
||||
for (const match of matches) {
|
||||
const parsed = Number(match[1]);
|
||||
if (!Number.isInteger(parsed) || parsed < 1 || parsed > evidenceLength || seen.has(parsed)) {
|
||||
continue;
|
||||
}
|
||||
|
||||
seen.add(parsed);
|
||||
indexes.push(parsed);
|
||||
}
|
||||
|
||||
return indexes;
|
||||
}
|
||||
|
||||
function parseStringArray(value: unknown, maxItems: number) {
|
||||
if (!Array.isArray(value)) {
|
||||
return [];
|
||||
}
|
||||
|
||||
return value
|
||||
.filter((entry): entry is string => typeof entry === 'string' && entry.trim().length > 0)
|
||||
.map((entry) => truncate(entry, 220))
|
||||
.slice(0, maxItems);
|
||||
}
|
||||
|
||||
/**
 * Validate one model-proposed action object and normalize it into a
 * ResearchCopilotSuggestedAction, or return null when it is unusable.
 *
 * Rules enforced here:
 * - `type` must be one of the three known action kinds.
 * - Draft actions (draft_note / draft_memo_section) require non-empty content.
 * - draft_memo_section additionally requires a recognized memo section.
 * - Free-text fields are trimmed and length-capped; a fresh id is assigned.
 *
 * @param value Untrusted entry from the model's `suggestedActions` array.
 * @param fallbackQuery Used when a queue_research_brief action omits its own query.
 */
function normalizeSuggestedAction(
  value: unknown,
  fallbackQuery: string
): ResearchCopilotSuggestedAction | null {
  // Reject non-object entries (including arrays) outright.
  if (!value || typeof value !== 'object' || Array.isArray(value)) {
    return null;
  }

  const candidate = value as Record<string, unknown>;
  const type = candidate.type;
  if (type !== 'draft_note' && type !== 'draft_memo_section' && type !== 'queue_research_brief') {
    return null;
  }

  // Label: model-provided when non-empty, otherwise a per-type default.
  const label = typeof candidate.label === 'string' && candidate.label.trim().length > 0
    ? truncate(candidate.label, 80)
    : type === 'draft_note'
      ? 'Use as note draft'
      : type === 'draft_memo_section'
        ? 'Use as memo draft'
        : 'Queue research brief';

  // Only the six known memo sections are accepted; anything else becomes null.
  const section = candidate.section === 'thesis'
    || candidate.section === 'variant_view'
    || candidate.section === 'catalysts'
    || candidate.section === 'risks'
    || candidate.section === 'disconfirming_evidence'
    || candidate.section === 'next_actions'
    ? candidate.section
    : null;
  const description = typeof candidate.description === 'string' && candidate.description.trim().length > 0
    ? truncate(candidate.description, 180)
    : null;
  const title = typeof candidate.title === 'string' && candidate.title.trim().length > 0
    ? truncate(candidate.title, 120)
    : null;
  const contentMarkdown = typeof candidate.contentMarkdown === 'string' && candidate.contentMarkdown.trim().length > 0
    ? candidate.contentMarkdown.trim()
    : null;
  // Keep positive integer citation indexes, deduplicated, in given order.
  // Note: indexes are not clamped to the evidence length here.
  const citationIndexes = Array.isArray(candidate.citationIndexes)
    ? candidate.citationIndexes
      .map((entry) => Math.trunc(Number(entry)))
      .filter((entry, index, source) => Number.isInteger(entry) && entry > 0 && source.indexOf(entry) === index)
    : [];
  // queue_research_brief falls back to the user's original query when the
  // model did not supply one; other types get null unless explicitly set.
  const query = typeof candidate.query === 'string' && candidate.query.trim().length > 0
    ? truncate(candidate.query, 180)
    : type === 'queue_research_brief'
      ? fallbackQuery
      : null;

  // Draft-style actions are useless without content to review.
  if ((type === 'draft_note' || type === 'draft_memo_section') && !contentMarkdown) {
    return null;
  }

  // A memo draft must target a concrete section.
  if (type === 'draft_memo_section' && !section) {
    return null;
  }

  return {
    id: randomUUID(),
    type,
    label,
    description,
    section,
    title,
    content_markdown: contentMarkdown,
    citation_indexes: citationIndexes,
    query
  };
}
|
||||
|
||||
function buildFallbackActions(query: string, memoSection: ResearchMemoSection | null, answerMarkdown: string) {
|
||||
return [
|
||||
{
|
||||
id: randomUUID(),
|
||||
type: memoSection ? 'draft_memo_section' : 'draft_note',
|
||||
label: memoSection ? 'Use as memo draft' : 'Use as note draft',
|
||||
description: memoSection
|
||||
? `Populate ${memoSection.replace('_', ' ')} with this answer for review.`
|
||||
: 'Populate the note draft editor with this answer for review.',
|
||||
section: memoSection,
|
||||
title: memoSection ? null : buildSessionTitle(query),
|
||||
content_markdown: answerMarkdown,
|
||||
citation_indexes: [],
|
||||
query: null
|
||||
},
|
||||
{
|
||||
id: randomUUID(),
|
||||
type: 'queue_research_brief',
|
||||
label: 'Queue research brief',
|
||||
description: 'Run a background synthesis job and save a longer-form brief to the library.',
|
||||
section: null,
|
||||
title: null,
|
||||
content_markdown: null,
|
||||
citation_indexes: [],
|
||||
query
|
||||
}
|
||||
] satisfies ResearchCopilotSuggestedAction[];
|
||||
}
|
||||
|
||||
export function parseCopilotResponse(
|
||||
rawText: string,
|
||||
evidence: SearchResult[],
|
||||
query: string,
|
||||
memoSection: ResearchMemoSection | null
|
||||
): ParsedCopilotPayload {
|
||||
const jsonText = extractJsonObject(rawText);
|
||||
if (!jsonText) {
|
||||
const answerMarkdown = rawText.trim() || 'Insufficient evidence to answer from the indexed sources.';
|
||||
return {
|
||||
answerMarkdown,
|
||||
followUps: [],
|
||||
suggestedActions: buildFallbackActions(query, memoSection, answerMarkdown),
|
||||
citationIndexes: evidence.slice(0, Math.min(3, evidence.length)).map((_value, index) => index + 1)
|
||||
};
|
||||
}
|
||||
|
||||
try {
|
||||
const parsed = JSON.parse(jsonText) as Record<string, unknown>;
|
||||
const answerMarkdown = typeof parsed.answerMarkdown === 'string' && parsed.answerMarkdown.trim().length > 0
|
||||
? parsed.answerMarkdown.trim()
|
||||
: 'Insufficient evidence to answer from the indexed sources.';
|
||||
const citationIndexes = parseCitationIndexes(answerMarkdown, evidence.length);
|
||||
const followUps = parseStringArray(parsed.followUps, MAX_FOLLOW_UPS);
|
||||
const suggestedActions = Array.isArray(parsed.suggestedActions)
|
||||
? parsed.suggestedActions
|
||||
.map((entry) => normalizeSuggestedAction(entry, query))
|
||||
.filter((entry): entry is ResearchCopilotSuggestedAction => Boolean(entry))
|
||||
.slice(0, MAX_SUGGESTED_ACTIONS)
|
||||
: [];
|
||||
|
||||
return {
|
||||
answerMarkdown,
|
||||
followUps,
|
||||
suggestedActions: suggestedActions.length > 0
|
||||
? suggestedActions
|
||||
: buildFallbackActions(query, memoSection, answerMarkdown),
|
||||
citationIndexes: citationIndexes.length > 0
|
||||
? citationIndexes
|
||||
: evidence.slice(0, Math.min(3, evidence.length)).map((_value, index) => index + 1)
|
||||
};
|
||||
} catch {
|
||||
const answerMarkdown = rawText.trim() || 'Insufficient evidence to answer from the indexed sources.';
|
||||
return {
|
||||
answerMarkdown,
|
||||
followUps: [],
|
||||
suggestedActions: buildFallbackActions(query, memoSection, answerMarkdown),
|
||||
citationIndexes: evidence.slice(0, Math.min(3, evidence.length)).map((_value, index) => index + 1)
|
||||
};
|
||||
}
|
||||
}
|
||||
419
lib/server/research-copilot.ts
Normal file
419
lib/server/research-copilot.ts
Normal file
@@ -0,0 +1,419 @@
|
||||
import type {
|
||||
ResearchCopilotCitation,
|
||||
ResearchCopilotTurnResponse,
|
||||
ResearchMemo,
|
||||
ResearchMemoSection,
|
||||
SearchResult,
|
||||
SearchSource
|
||||
} from '@/lib/types';
|
||||
import { runAiAnalysis } from '@/lib/server/ai';
|
||||
import {
|
||||
extractJsonObject,
|
||||
parseCitationIndexes,
|
||||
parseCopilotResponse
|
||||
} from '@/lib/server/research-copilot-format';
|
||||
import {
|
||||
appendResearchCopilotMessage,
|
||||
getOrCreateResearchCopilotSession,
|
||||
upsertResearchCopilotSessionState
|
||||
} from '@/lib/server/repos/research-copilot';
|
||||
import {
|
||||
createAiReportArtifactFromAccession,
|
||||
createFilingArtifactFromAccession,
|
||||
getResearchArtifactsByIdsForUser,
|
||||
getResearchMemoByTicker
|
||||
} from '@/lib/server/repos/research-library';
|
||||
import { searchKnowledgeBase } from '@/lib/server/search';
|
||||
|
||||
type CopilotTurnInput = {
|
||||
userId: string;
|
||||
ticker: string;
|
||||
query: string;
|
||||
selectedSources?: SearchSource[];
|
||||
pinnedArtifactIds?: number[];
|
||||
memoSection?: ResearchMemoSection | null;
|
||||
};
|
||||
|
||||
const DEFAULT_SELECTED_SOURCES: SearchSource[] = ['documents', 'filings', 'research'];
|
||||
const MAX_HISTORY_MESSAGES = 6;
|
||||
const MAX_CONTEXT_RESULTS = 6;
|
||||
const MAX_CONTEXT_CHARS = 8_000;
|
||||
|
||||
function normalizeTicker(ticker: string) {
|
||||
return ticker.trim().toUpperCase();
|
||||
}
|
||||
|
||||
function normalizeSources(value?: SearchSource[] | null) {
|
||||
const unique = new Set<SearchSource>();
|
||||
for (const source of value ?? DEFAULT_SELECTED_SOURCES) {
|
||||
if (source === 'documents' || source === 'filings' || source === 'research') {
|
||||
unique.add(source);
|
||||
}
|
||||
}
|
||||
|
||||
return unique.size > 0 ? [...unique] : [...DEFAULT_SELECTED_SOURCES];
|
||||
}
|
||||
|
||||
function normalizePinnedArtifactIds(value?: number[] | null) {
|
||||
const unique = new Set<number>();
|
||||
for (const id of value ?? []) {
|
||||
const normalized = Math.trunc(Number(id));
|
||||
if (Number.isInteger(normalized) && normalized > 0) {
|
||||
unique.add(normalized);
|
||||
}
|
||||
}
|
||||
|
||||
return [...unique];
|
||||
}
|
||||
|
||||
function truncate(value: string, maxLength: number) {
|
||||
const normalized = value.trim();
|
||||
if (normalized.length <= maxLength) {
|
||||
return normalized;
|
||||
}
|
||||
|
||||
return `${normalized.slice(0, maxLength - 1).trimEnd()}…`;
|
||||
}
|
||||
|
||||
function buildSessionTitle(query: string) {
|
||||
return truncate(query, 72);
|
||||
}
|
||||
|
||||
function summarizeMemoPosture(memo: ResearchMemo | null) {
|
||||
if (!memo) {
|
||||
return 'No investment memo exists yet.';
|
||||
}
|
||||
|
||||
return JSON.stringify({
|
||||
rating: memo.rating,
|
||||
conviction: memo.conviction,
|
||||
timeHorizonMonths: memo.time_horizon_months,
|
||||
packetTitle: memo.packet_title,
|
||||
packetSubtitle: memo.packet_subtitle
|
||||
});
|
||||
}
|
||||
|
||||
function buildConversationContext(history: Array<{ role: 'user' | 'assistant'; content_markdown: string }>) {
|
||||
if (history.length === 0) {
|
||||
return 'No previous conversation.';
|
||||
}
|
||||
|
||||
return history.map((message) => `${message.role.toUpperCase()}: ${truncate(message.content_markdown, 600)}`).join('\n\n');
|
||||
}
|
||||
|
||||
function buildPinnedArtifactContext(artifacts: Array<{ id: number; title: string | null; summary: string | null; body_markdown: string | null; kind: string }>) {
|
||||
if (artifacts.length === 0) {
|
||||
return 'No pinned artifacts.';
|
||||
}
|
||||
|
||||
return artifacts.map((artifact) => JSON.stringify({
|
||||
id: artifact.id,
|
||||
kind: artifact.kind,
|
||||
title: artifact.title,
|
||||
summary: artifact.summary,
|
||||
body: artifact.body_markdown ? truncate(artifact.body_markdown, 700) : null
|
||||
})).join('\n');
|
||||
}
|
||||
|
||||
function buildEvidence(results: SearchResult[]) {
|
||||
const evidence: SearchResult[] = [];
|
||||
let totalChars = 0;
|
||||
|
||||
for (const result of results) {
|
||||
if (evidence.length >= MAX_CONTEXT_RESULTS) {
|
||||
break;
|
||||
}
|
||||
|
||||
if (totalChars + result.chunkText.length > MAX_CONTEXT_CHARS && evidence.length > 0) {
|
||||
break;
|
||||
}
|
||||
|
||||
evidence.push(result);
|
||||
totalChars += result.chunkText.length;
|
||||
}
|
||||
|
||||
return evidence;
|
||||
}
|
||||
|
||||
/**
 * Assemble the single prompt string for a copilot turn.
 *
 * Layout: instruction lines (strict-JSON output contract), then turn context
 * (ticker, sources, memo posture, pinned artifacts, recent conversation, user
 * question), then numbered evidence excerpts whose [n] labels line up with
 * the 1-based citation indexes the model is told to emit.
 */
function buildCopilotPrompt(input: {
  ticker: string;
  query: string;
  selectedSources: SearchSource[];
  memoSection: ResearchMemoSection | null;
  memo: ResearchMemo | null;
  history: Array<{ role: 'user' | 'assistant'; content_markdown: string }>;
  pinnedArtifacts: Array<{ id: number; title: string | null; summary: string | null; body_markdown: string | null; kind: string }>;
  evidence: SearchResult[];
}) {
  // One block per evidence chunk; [index + 1] matches citation markers.
  const evidenceText = input.evidence.map((result, index) => ([
    `[${index + 1}] ${result.citationLabel}`,
    `Source kind: ${result.sourceKind}`,
    `Ticker: ${result.ticker ?? 'n/a'}`,
    `Title: ${result.title ?? result.sourceRef}`,
    `Excerpt: ${result.chunkText}`
  ].join('\n'))).join('\n\n');

  return [
    'You are an embedded buy-side company research copilot.',
    'Use only the supplied evidence. Never use outside knowledge.',
    'Return strict JSON only with this shape:',
    '{"answerMarkdown":"string","followUps":["string"],"suggestedActions":[{"type":"draft_note|draft_memo_section|queue_research_brief","label":"string","description":"string|null","section":"thesis|variant_view|catalysts|risks|disconfirming_evidence|next_actions|null","title":"string|null","contentMarkdown":"string|null","citationIndexes":[1],"query":"string|null"}]}',
    'The answerMarkdown should use inline citations like [1] and [2].',
    'Suggested actions must be review-first. Never instruct the system to save or mutate automatically.',
    `Ticker: ${input.ticker}`,
    `Selected sources: ${input.selectedSources.join(', ')}`,
    `Target memo section: ${input.memoSection ?? 'none'}`,
    `Memo posture: ${summarizeMemoPosture(input.memo)}`,
    `Pinned artifacts:\n${buildPinnedArtifactContext(input.pinnedArtifacts)}`,
    `Recent conversation:\n${buildConversationContext(input.history)}`,
    `User question: ${input.query}`,
    '',
    'Evidence:',
    evidenceText
  ].join('\n');
}
|
||||
|
||||
/**
 * Best-effort mapping from a search hit to a research-library artifact id.
 *
 * - research_note hits: sourceRef already is the artifact id (validated as a
 *   positive integer).
 * - filing-backed hits: derive an artifact from the accession number — the
 *   AI-report artifact for filing briefs, the raw filing artifact otherwise.
 *   If the AI-report path throws, fall back to the raw filing artifact.
 *
 * NOTE(review): the create* calls presumably upsert/reuse existing artifacts
 * rather than duplicating them — confirm in research-library.
 * Returns null whenever no artifact can be resolved; never throws.
 */
async function materializeArtifactIdForResult(userId: string, result: SearchResult) {
  if (result.sourceKind === 'research_note') {
    // sourceRef is the artifact id encoded as a string.
    const artifactId = Math.trunc(Number(result.sourceRef));
    return Number.isInteger(artifactId) && artifactId > 0 ? artifactId : null;
  }

  // Filing-backed results need an accession number to materialize from.
  if (!result.accessionNumber) {
    return null;
  }

  try {
    if (result.sourceKind === 'filing_brief') {
      return (await createAiReportArtifactFromAccession(userId, result.accessionNumber)).id;
    }

    return (await createFilingArtifactFromAccession(userId, result.accessionNumber)).id;
  } catch {
    // AI-report creation failed for a brief: degrade to the raw filing artifact.
    if (result.sourceKind === 'filing_brief') {
      try {
        return (await createFilingArtifactFromAccession(userId, result.accessionNumber)).id;
      } catch {
        return null;
      }
    }

    return null;
  }
}
|
||||
|
||||
/**
 * Resolve the model's cited indexes (1-based into `evidence`) into full
 * citation records; out-of-range indexes are skipped. For each kept citation
 * a library artifact is materialized when possible (null otherwise) so the
 * UI can deep-link to it.
 *
 * Citations are processed one at a time. NOTE(review): the materialize step
 * may insert artifact rows, so parallelizing this loop could create
 * duplicates — confirm before changing.
 */
async function buildCopilotCitations(userId: string, evidence: SearchResult[], citationIndexes: number[]) {
  const citations: ResearchCopilotCitation[] = [];

  for (const index of citationIndexes) {
    const result = evidence[index - 1];
    if (!result) {
      continue;
    }

    citations.push({
      index,
      label: result.citationLabel,
      chunkId: result.chunkId,
      href: result.href,
      source: result.source,
      sourceKind: result.sourceKind,
      sourceRef: result.sourceRef,
      title: result.title,
      ticker: result.ticker,
      accessionNumber: result.accessionNumber,
      filingDate: result.filingDate,
      // Prefer the pre-built snippet; otherwise a capped excerpt of the chunk.
      excerpt: result.snippet || truncate(result.chunkText, 280),
      artifactId: await materializeArtifactIdForResult(userId, result)
    });
  }

  return citations;
}
|
||||
|
||||
export async function runResearchCopilotTurn(input: CopilotTurnInput): Promise<ResearchCopilotTurnResponse> {
|
||||
const ticker = normalizeTicker(input.ticker);
|
||||
const query = input.query.trim();
|
||||
if (!ticker) {
|
||||
throw new Error('ticker is required');
|
||||
}
|
||||
|
||||
if (!query) {
|
||||
throw new Error('query is required');
|
||||
}
|
||||
|
||||
const selectedSources = normalizeSources(input.selectedSources);
|
||||
const pinnedArtifactIds = normalizePinnedArtifactIds(input.pinnedArtifactIds);
|
||||
const existingSession = await getOrCreateResearchCopilotSession({
|
||||
userId: input.userId,
|
||||
ticker,
|
||||
title: buildSessionTitle(query),
|
||||
selectedSources,
|
||||
pinnedArtifactIds
|
||||
});
|
||||
const memo = await getResearchMemoByTicker(input.userId, ticker);
|
||||
const history = existingSession.messages.slice(-MAX_HISTORY_MESSAGES).map((message) => ({
|
||||
role: message.role,
|
||||
content_markdown: message.content_markdown
|
||||
}));
|
||||
const pinnedArtifacts = await getResearchArtifactsByIdsForUser(input.userId, pinnedArtifactIds);
|
||||
|
||||
const userMessage = await appendResearchCopilotMessage({
|
||||
userId: input.userId,
|
||||
sessionId: existingSession.id,
|
||||
role: 'user',
|
||||
contentMarkdown: query,
|
||||
selectedSources,
|
||||
pinnedArtifactIds,
|
||||
memoSection: input.memoSection ?? null
|
||||
});
|
||||
|
||||
const results = await searchKnowledgeBase({
|
||||
userId: input.userId,
|
||||
query,
|
||||
ticker,
|
||||
sources: selectedSources,
|
||||
limit: 10
|
||||
});
|
||||
const evidence = buildEvidence(results);
|
||||
if (evidence.length === 0) {
|
||||
const answerMarkdown = 'Insufficient evidence to answer from the indexed sources.';
|
||||
const assistantMessage = await appendResearchCopilotMessage({
|
||||
userId: input.userId,
|
||||
sessionId: existingSession.id,
|
||||
role: 'assistant',
|
||||
contentMarkdown: answerMarkdown,
|
||||
citations: [],
|
||||
followUps: [],
|
||||
suggestedActions: parseCopilotResponse(answerMarkdown, [], query, input.memoSection ?? null).suggestedActions,
|
||||
selectedSources,
|
||||
pinnedArtifactIds,
|
||||
memoSection: input.memoSection ?? null
|
||||
});
|
||||
const session = await upsertResearchCopilotSessionState({
|
||||
userId: input.userId,
|
||||
ticker,
|
||||
title: existingSession.title ?? buildSessionTitle(query),
|
||||
selectedSources,
|
||||
pinnedArtifactIds
|
||||
});
|
||||
|
||||
return {
|
||||
session,
|
||||
user_message: userMessage,
|
||||
assistant_message: assistantMessage,
|
||||
results
|
||||
};
|
||||
}
|
||||
|
||||
const response = await runAiAnalysis(
|
||||
buildCopilotPrompt({
|
||||
ticker,
|
||||
query,
|
||||
selectedSources,
|
||||
memoSection: input.memoSection ?? null,
|
||||
memo,
|
||||
history,
|
||||
pinnedArtifacts,
|
||||
evidence
|
||||
}),
|
||||
'Return strict JSON only. Stay concise, factual, and operational.',
|
||||
{ workload: 'report' }
|
||||
);
|
||||
const parsed = parseCopilotResponse(response.text, evidence, query, input.memoSection ?? null);
|
||||
const citations = await buildCopilotCitations(input.userId, evidence, parsed.citationIndexes);
|
||||
const assistantMessage = await appendResearchCopilotMessage({
|
||||
userId: input.userId,
|
||||
sessionId: existingSession.id,
|
||||
role: 'assistant',
|
||||
contentMarkdown: parsed.answerMarkdown,
|
||||
citations,
|
||||
followUps: parsed.followUps,
|
||||
suggestedActions: parsed.suggestedActions,
|
||||
selectedSources,
|
||||
pinnedArtifactIds,
|
||||
memoSection: input.memoSection ?? null
|
||||
});
|
||||
|
||||
const session = await upsertResearchCopilotSessionState({
|
||||
userId: input.userId,
|
||||
ticker,
|
||||
title: existingSession.title ?? buildSessionTitle(query),
|
||||
selectedSources,
|
||||
pinnedArtifactIds
|
||||
});
|
||||
|
||||
return {
|
||||
session,
|
||||
user_message: userMessage,
|
||||
assistant_message: assistantMessage,
|
||||
results
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Assemble the prompt for a longer-form background research brief: fixed
 * structure instructions, turn context (ticker, request, memo posture), then
 * numbered evidence excerpts matching the inline [n] citation convention.
 */
function buildResearchBriefPrompt(input: {
  ticker: string;
  query: string;
  memo: ResearchMemo | null;
  evidence: SearchResult[];
}) {
  // One block per evidence chunk; [index + 1] matches citation markers.
  const evidenceText = input.evidence.map((result, index) => [
    `[${index + 1}] ${result.citationLabel}`,
    `Title: ${result.title ?? result.sourceRef}`,
    `Excerpt: ${result.chunkText}`
  ].join('\n')).join('\n\n');

  return [
    'Write a longer-form buy-side research brief grounded only in the evidence below.',
    'Use markdown with these sections: Executive Summary, Key Evidence, Memo Implications, Open Questions.',
    `Ticker: ${input.ticker}`,
    `Brief request: ${input.query}`,
    `Memo posture: ${summarizeMemoPosture(input.memo)}`,
    '',
    'Evidence:',
    evidenceText
  ].join('\n');
}
|
||||
|
||||
export async function generateResearchBrief(input: {
|
||||
userId: string;
|
||||
ticker: string;
|
||||
query: string;
|
||||
selectedSources?: SearchSource[];
|
||||
}) {
|
||||
const selectedSources = normalizeSources(input.selectedSources);
|
||||
const memo = await getResearchMemoByTicker(input.userId, input.ticker);
|
||||
const results = await searchKnowledgeBase({
|
||||
userId: input.userId,
|
||||
query: input.query,
|
||||
ticker: input.ticker,
|
||||
sources: selectedSources,
|
||||
limit: 10
|
||||
});
|
||||
const evidence = buildEvidence(results);
|
||||
const response = await runAiAnalysis(
|
||||
buildResearchBriefPrompt({
|
||||
ticker: normalizeTicker(input.ticker),
|
||||
query: input.query.trim(),
|
||||
memo,
|
||||
evidence
|
||||
}),
|
||||
'Use neutral analyst prose and cite evidence inline like [1].',
|
||||
{ workload: 'report' }
|
||||
);
|
||||
|
||||
return {
|
||||
provider: response.provider,
|
||||
model: response.model,
|
||||
bodyMarkdown: response.text.trim(),
|
||||
evidence
|
||||
};
|
||||
}
|
||||
|
||||
/**
 * Internal helpers re-exported solely for unit tests; not part of the public
 * API surface of this module.
 */
export const __researchCopilotInternals = {
  buildCopilotPrompt,
  buildResearchBriefPrompt,
  extractJsonObject,
  parseCopilotResponse,
  parseCitationIndexes
};
|
||||
@@ -120,4 +120,35 @@ describe('task notification builder', () => {
|
||||
expect(notification.detailLine).toBe('Could not load the primary filing document.');
|
||||
expect(notification.actions.some((action) => action.id === 'open_filings')).toBe(true);
|
||||
});
|
||||
|
||||
it('adds research navigation for completed research brief jobs', () => {
|
||||
const notification = buildTaskNotification(baseTask({
|
||||
task_type: 'research_brief',
|
||||
status: 'completed',
|
||||
stage: 'completed',
|
||||
stage_detail: 'Generated research brief artifact for NVDA.',
|
||||
stage_context: {
|
||||
subject: {
|
||||
ticker: 'NVDA'
|
||||
}
|
||||
},
|
||||
payload: {
|
||||
ticker: 'NVDA',
|
||||
query: 'Update the thesis'
|
||||
},
|
||||
result: {
|
||||
ticker: 'NVDA',
|
||||
artifactId: 12,
|
||||
model: 'test-model'
|
||||
},
|
||||
finished_at: '2026-03-09T10:06:00.000Z'
|
||||
}));
|
||||
|
||||
expect(notification.actions[0]).toMatchObject({
|
||||
id: 'open_research',
|
||||
href: '/research?ticker=NVDA',
|
||||
primary: true
|
||||
});
|
||||
expect(notification.stats.some((stat) => stat.label === 'Artifact' && stat.value === '12')).toBe(true);
|
||||
});
|
||||
});
|
||||
|
||||
@@ -117,6 +117,13 @@ function buildStats(task: TaskCore): TaskNotificationStat[] {
|
||||
);
|
||||
break;
|
||||
}
|
||||
case 'research_brief':
|
||||
stats.push(
|
||||
makeStat('Ticker', asString(result?.ticker) ?? task.stage_context?.subject?.ticker ?? null),
|
||||
makeStat('Artifact', asNumber(result?.artifactId) ?? null),
|
||||
makeStat('Model', asString(result?.model) ?? null)
|
||||
);
|
||||
break;
|
||||
}
|
||||
|
||||
if (stats.every((stat) => stat === null)) {
|
||||
@@ -194,6 +201,14 @@ function buildActions(task: TaskCore): TaskNotificationAction[] {
|
||||
primary: true
|
||||
});
|
||||
break;
|
||||
case 'research_brief':
|
||||
actions.push({
|
||||
id: 'open_research',
|
||||
label: 'Open research',
|
||||
href: ticker ? `/research?ticker=${encodeURIComponent(ticker)}` : '/research',
|
||||
primary: true
|
||||
});
|
||||
break;
|
||||
}
|
||||
|
||||
actions.push({
|
||||
|
||||
@@ -10,6 +10,7 @@ import type {
|
||||
import { runAiAnalysis } from '@/lib/server/ai';
|
||||
import { buildPortfolioSummary } from '@/lib/server/portfolio';
|
||||
import { getQuote } from '@/lib/server/prices';
|
||||
import { generateResearchBrief } from '@/lib/server/research-copilot';
|
||||
import { indexSearchDocuments } from '@/lib/server/search';
|
||||
import {
|
||||
getFilingByAccession,
|
||||
@@ -32,7 +33,9 @@ import {
|
||||
listUserHoldings
|
||||
} from '@/lib/server/repos/holdings';
|
||||
import { createPortfolioInsight } from '@/lib/server/repos/insights';
|
||||
import { createResearchArtifactRecord } from '@/lib/server/repos/research-library';
|
||||
import { updateTaskStage } from '@/lib/server/repos/tasks';
|
||||
import { updateWatchlistReviewByTicker } from '@/lib/server/repos/watchlist';
|
||||
import {
|
||||
fetchPrimaryFilingText,
|
||||
fetchRecentFilings
|
||||
@@ -1302,6 +1305,97 @@ async function processPortfolioInsights(task: Task) {
|
||||
);
|
||||
}
|
||||
|
||||
async function processResearchBrief(task: Task) {
|
||||
const ticker = typeof task.payload.ticker === 'string' ? task.payload.ticker.trim().toUpperCase() : '';
|
||||
const query = typeof task.payload.query === 'string' ? task.payload.query.trim() : '';
|
||||
const sources = Array.isArray(task.payload.sources)
|
||||
? task.payload.sources.filter((entry): entry is 'documents' | 'filings' | 'research' => entry === 'documents' || entry === 'filings' || entry === 'research')
|
||||
: undefined;
|
||||
|
||||
if (!ticker) {
|
||||
throw new Error('Research brief task requires a ticker');
|
||||
}
|
||||
|
||||
if (!query) {
|
||||
throw new Error('Research brief task requires a query');
|
||||
}
|
||||
|
||||
await setProjectionStage(task, 'research.retrieve', `Collecting evidence for ${ticker} research brief`, {
|
||||
subject: { ticker },
|
||||
progress: { current: 1, total: 3, unit: 'steps' }
|
||||
});
|
||||
|
||||
const brief = await generateResearchBrief({
|
||||
userId: task.user_id,
|
||||
ticker,
|
||||
query,
|
||||
selectedSources: sources
|
||||
});
|
||||
|
||||
await setProjectionStage(task, 'research.answer', `Generating research brief for ${ticker}`, {
|
||||
subject: { ticker },
|
||||
progress: { current: 2, total: 3, unit: 'steps' },
|
||||
counters: {
|
||||
evidence: brief.evidence.length
|
||||
}
|
||||
});
|
||||
|
||||
const summary = brief.bodyMarkdown
|
||||
.split('\n')
|
||||
.map((line) => line.trim())
|
||||
.find((line) => line.length > 0 && !line.startsWith('#'))
|
||||
?? `Generated research brief for ${ticker}.`;
|
||||
|
||||
await setProjectionStage(task, 'research.persist', `Saving research brief artifact for ${ticker}`, {
|
||||
subject: { ticker },
|
||||
progress: { current: 3, total: 3, unit: 'steps' },
|
||||
counters: {
|
||||
evidence: brief.evidence.length
|
||||
}
|
||||
});
|
||||
|
||||
const artifact = await createResearchArtifactRecord({
|
||||
userId: task.user_id,
|
||||
ticker,
|
||||
kind: 'ai_report',
|
||||
source: 'system',
|
||||
subtype: 'research_brief',
|
||||
title: `Research brief · ${query}`,
|
||||
summary,
|
||||
bodyMarkdown: brief.bodyMarkdown,
|
||||
tags: ['copilot', 'research-brief'],
|
||||
metadata: {
|
||||
query,
|
||||
sources: sources ?? ['documents', 'filings', 'research'],
|
||||
provider: brief.provider,
|
||||
model: brief.model,
|
||||
citations: brief.evidence.map((result, index) => ({
|
||||
index: index + 1,
|
||||
label: result.citationLabel,
|
||||
href: result.href
|
||||
}))
|
||||
}
|
||||
});
|
||||
|
||||
await updateWatchlistReviewByTicker(task.user_id, ticker, artifact.updated_at);
|
||||
|
||||
return buildTaskOutcome(
|
||||
{
|
||||
ticker,
|
||||
artifactId: artifact.id,
|
||||
provider: brief.provider,
|
||||
model: brief.model
|
||||
},
|
||||
`Generated research brief artifact for ${ticker}.`,
|
||||
{
|
||||
subject: { ticker },
|
||||
counters: {
|
||||
evidence: brief.evidence.length
|
||||
}
|
||||
}
|
||||
);
|
||||
}
|
||||
|
||||
export const __taskProcessorInternals = {
|
||||
parseExtractionPayload,
|
||||
deterministicExtractionFallback,
|
||||
@@ -1320,6 +1414,8 @@ export async function runTaskProcessor(task: Task) {
|
||||
return await processPortfolioInsights(task);
|
||||
case 'index_search':
|
||||
return await processIndexSearch(task);
|
||||
case 'research_brief':
|
||||
return await processResearchBrief(task);
|
||||
default:
|
||||
throw new Error(`Unsupported task type: ${task.task_type}`);
|
||||
}
|
||||
|
||||
Reference in New Issue
Block a user