Files
Neon-Desk/lib/server/repos/research-library.ts

1123 lines
34 KiB
TypeScript

import { randomUUID } from 'node:crypto';
import { mkdir, readFile, rm, writeFile } from 'node:fs/promises';
import { dirname, extname, join } from 'node:path';
import { and, asc, desc, eq, sql } from 'drizzle-orm';
import type {
ResearchArtifact,
ResearchArtifactKind,
ResearchArtifactSource,
ResearchJournalEntry,
ResearchJournalEntryType,
ResearchLibraryResponse,
ResearchMemo,
ResearchMemoConviction,
ResearchMemoEvidenceLink,
ResearchMemoRating,
ResearchMemoSection,
ResearchPacket,
ResearchPacketSection,
ResearchVisibilityScope,
ResearchWorkspace
} from '@/lib/types';
import { db, getSqliteClient } from '@/lib/server/db';
import {
researchArtifact,
researchMemo,
researchMemoEvidence
} from '@/lib/server/db/schema';
import { getFilingByAccession, listFilingsRecords } from '@/lib/server/repos/filings';
import { getWatchlistItemByTicker } from '@/lib/server/repos/watchlist';
// Row types inferred from the Drizzle table definitions.
type ResearchArtifactRow = typeof researchArtifact.$inferSelect;
type ResearchMemoRow = typeof researchMemo.$inferSelect;
// NOTE(review): ResearchMemoEvidenceRow is declared but never referenced in
// this file — presumably kept for symmetry with the other row types; confirm
// before removing.
type ResearchMemoEvidenceRow = typeof researchMemoEvidence.$inferSelect;
// Filters accepted by listResearchArtifacts. Only `ticker` is required;
// null/undefined on any other field means "do not filter on this dimension".
type ArtifactListFilters = {
ticker: string;
q?: string | null;
kind?: ResearchArtifactKind | null;
tag?: string | null;
source?: ResearchArtifactSource | null;
linkedToMemo?: boolean | null;
limit?: number;
};
// Display titles for the research packet sections. Key order matters: it
// drives the section order produced by toPacketSections.
const RESEARCH_PACKET_SECTION_TITLES: Record<ResearchMemoSection, string> = {
thesis: 'Core Thesis',
variant_view: 'Variant View',
catalysts: 'Catalysts',
risks: 'Risks',
disconfirming_evidence: 'Disconfirming Evidence',
next_actions: 'Next Actions'
};
/** Canonicalize a ticker symbol: strip surrounding whitespace, force upper case. */
function normalizeTicker(ticker: string) {
  const trimmed = ticker.trim();
  return trimmed.toUpperCase();
}
/** Trim an optional string; blank, empty, or missing values collapse to null. */
function normalizeOptionalString(value?: string | null) {
  if (value === null || value === undefined) {
    return null;
  }
  const trimmed = value.trim();
  return trimmed.length > 0 ? trimmed : null;
}
/** Truncate a number toward zero; returns null unless the result is >= 1. */
function normalizePositiveInteger(value?: number | null) {
  if (typeof value !== 'number' || !Number.isFinite(value)) {
    return null;
  }
  const truncated = Math.trunc(value);
  return truncated >= 1 ? truncated : null;
}
/** Accept only a plain (truthy, non-array) object; anything else becomes null. */
function normalizeRecord(value?: Record<string, unknown> | null) {
  if (value && typeof value === 'object' && !Array.isArray(value)) {
    return value;
  }
  return null;
}
/**
 * Clean and dedupe a tag list: keep only non-empty trimmed strings, preserving
 * first-occurrence order. Non-arrays yield an empty list.
 */
function normalizeTags(tags?: string[] | null) {
  if (!Array.isArray(tags)) {
    return [];
  }
  const cleaned = tags
    .filter((tag): tag is string => typeof tag === 'string')
    .map((tag) => tag.trim())
    .filter((tag) => tag.length > 0);
  return [...new Set(cleaned)];
}
/**
 * Concatenate every searchable text field of an artifact into one
 * newline-joined blob for the FTS index (metadata is serialized as JSON).
 * Returns null when no field carries any text.
 */
function buildArtifactSearchText(input: {
  title?: string | null;
  summary?: string | null;
  bodyMarkdown?: string | null;
  fileName?: string | null;
  subtype?: string | null;
  metadata?: Record<string, unknown> | null;
}) {
  const candidates = [
    normalizeOptionalString(input.title),
    normalizeOptionalString(input.summary),
    normalizeOptionalString(input.bodyMarkdown),
    normalizeOptionalString(input.fileName),
    normalizeOptionalString(input.subtype),
    input.metadata ? JSON.stringify(input.metadata) : null
  ];
  const present: string[] = [];
  for (const candidate of candidates) {
    if (candidate) {
      present.push(candidate);
    }
  }
  return present.length === 0 ? null : present.join('\n');
}
/**
 * Map a raw research_artifact DB row onto the API-facing ResearchArtifact
 * shape: optional columns are coalesced to explicit nulls, tags are cleaned,
 * and memo linkage is stamped on (defaults to false).
 */
function toResearchArtifact(row: ResearchArtifactRow, linkedToMemo = false): ResearchArtifact {
  const nullable = <T>(value: T | null | undefined): T | null => value ?? null;
  return {
    id: row.id,
    user_id: row.user_id,
    organization_id: nullable(row.organization_id),
    ticker: row.ticker,
    accession_number: nullable(row.accession_number),
    kind: row.kind,
    source: row.source,
    subtype: nullable(row.subtype),
    title: nullable(row.title),
    summary: nullable(row.summary),
    body_markdown: nullable(row.body_markdown),
    search_text: nullable(row.search_text),
    visibility_scope: row.visibility_scope,
    tags: normalizeTags(row.tags ?? []),
    metadata: nullable(row.metadata),
    file_name: nullable(row.file_name),
    mime_type: nullable(row.mime_type),
    file_size_bytes: nullable(row.file_size_bytes),
    storage_path: nullable(row.storage_path),
    created_at: row.created_at,
    updated_at: row.updated_at,
    linked_to_memo: linkedToMemo
  };
}
/**
 * Map a raw research_memo DB row onto the API-facing ResearchMemo shape.
 * Optional columns are coalesced to explicit nulls; the markdown sections
 * pass through untouched.
 */
function toResearchMemo(row: ResearchMemoRow): ResearchMemo {
  const nullable = <T>(value: T | null | undefined): T | null => value ?? null;
  return {
    id: row.id,
    user_id: row.user_id,
    organization_id: nullable(row.organization_id),
    ticker: row.ticker,
    rating: nullable(row.rating),
    conviction: nullable(row.conviction),
    time_horizon_months: nullable(row.time_horizon_months),
    packet_title: nullable(row.packet_title),
    packet_subtitle: nullable(row.packet_subtitle),
    thesis_markdown: row.thesis_markdown,
    variant_view_markdown: row.variant_view_markdown,
    catalysts_markdown: row.catalysts_markdown,
    risks_markdown: row.risks_markdown,
    disconfirming_evidence_markdown: row.disconfirming_evidence_markdown,
    next_actions_markdown: row.next_actions_markdown,
    created_at: row.created_at,
    updated_at: row.updated_at
  };
}
/**
 * Translate an artifact kind into the legacy journal entry type.
 * Returns null for kinds that have no journal representation (e.g. uploads).
 */
function toJournalType(kind: ResearchArtifactKind, accessionNumber: string | null): ResearchJournalEntryType | null {
  switch (kind) {
    case 'status_change':
      return 'status_change';
    case 'note':
      // Notes attached to a specific filing surface as filing notes.
      return accessionNumber ? 'filing_note' : 'note';
    case 'filing':
    case 'ai_report':
      return 'filing_note';
    default:
      return null;
  }
}
/**
 * Project an artifact into the legacy journal-entry shape, or null when the
 * artifact's kind has no journal representation.
 */
function toResearchJournalEntry(artifact: ResearchArtifact): ResearchJournalEntry | null {
  const entryType = toJournalType(artifact.kind, artifact.accession_number);
  if (entryType === null) {
    return null;
  }
  // Journal bodies are required: fall back to the summary, then empty string.
  const body = artifact.body_markdown ?? artifact.summary ?? '';
  return {
    id: artifact.id,
    user_id: artifact.user_id,
    ticker: artifact.ticker,
    accession_number: artifact.accession_number,
    entry_type: entryType,
    title: artifact.title,
    body_markdown: body,
    metadata: artifact.metadata,
    created_at: artifact.created_at,
    updated_at: artifact.updated_at
  };
}
/**
 * Assemble the ordered packet sections from a memo's markdown fields plus the
 * evidence links grouped per section. A missing memo yields empty bodies; the
 * section order follows the RESEARCH_PACKET_SECTION_TITLES key order.
 */
function toPacketSections(
  memo: ResearchMemo | null,
  evidenceBySection: Map<ResearchMemoSection, ResearchMemoEvidenceLink[]>
): ResearchPacketSection[] {
  const bodyFor = (section: ResearchMemoSection): string => {
    switch (section) {
      case 'thesis': return memo?.thesis_markdown ?? '';
      case 'variant_view': return memo?.variant_view_markdown ?? '';
      case 'catalysts': return memo?.catalysts_markdown ?? '';
      case 'risks': return memo?.risks_markdown ?? '';
      case 'disconfirming_evidence': return memo?.disconfirming_evidence_markdown ?? '';
      case 'next_actions': return memo?.next_actions_markdown ?? '';
      default: return '';
    }
  };
  const sections = Object.keys(RESEARCH_PACKET_SECTION_TITLES) as ResearchMemoSection[];
  return sections.map((section) => ({
    section,
    title: RESEARCH_PACKET_SECTION_TITLES[section],
    body_markdown: bodyFor(section),
    evidence: evidenceBySection.get(section) ?? []
  }));
}
/**
 * Reduce an arbitrary file name to a filesystem-safe slug: runs of characters
 * outside [a-zA-Z0-9._-] become single dashes, repeated dashes collapse, and
 * edge dashes are stripped. Falls back to 'research-file' if nothing survives.
 */
function safeFileName(fileName: string) {
  let slug = fileName.trim();
  slug = slug.replace(/[^a-zA-Z0-9._-]+/g, '-');
  slug = slug.replace(/-+/g, '-');
  slug = slug.replace(/^-+|-+$/g, '');
  return slug.length > 0 ? slug : 'research-file';
}
/** Root directory (under the process working directory) for research uploads. */
function getUploadsRoot() {
  const root = process.cwd();
  return join(root, 'data', 'research-uploads');
}
// Re-sync the FTS5 mirror (research_artifact_fts) for one artifact: drop any
// existing index row, then re-insert from the current research_artifact row.
// When the artifact row no longer exists (the delete path), only the drop
// happens, which clears the stale index entry.
function syncArtifactSearchIndex(artifactId: number) {
const client = getSqliteClient();
// Always clear first so an update never leaves a duplicate FTS row behind.
client.query('DELETE FROM `research_artifact_fts` WHERE `artifact_id` = ?').run(artifactId);
// Read the canonical row straight from SQLite (not via Drizzle) so the raw
// JSON `tags` column is available as text.
const row = client.query(`
SELECT
id,
user_id,
ticker,
title,
summary,
body_markdown,
search_text,
tags
FROM research_artifact
WHERE id = ?
LIMIT 1
`).get(artifactId) as {
id: number;
user_id: string;
ticker: string;
title: string | null;
summary: string | null;
body_markdown: string | null;
search_text: string | null;
tags: string | null;
} | null;
if (!row) {
// Artifact was deleted — the FTS row is already cleared above.
return;
}
// Flatten the JSON tag array into a space-separated token string for FTS;
// malformed JSON degrades to empty tag text instead of failing the sync.
let tagsText = '';
try {
const parsed = JSON.parse(row.tags ?? '[]');
if (Array.isArray(parsed)) {
tagsText = parsed.filter((entry): entry is string => typeof entry === 'string').join(' ');
}
} catch {
tagsText = '';
}
// Nullable text columns are indexed as empty strings.
client.query(`
INSERT INTO research_artifact_fts (
artifact_id,
user_id,
ticker,
title,
summary,
body_markdown,
search_text,
tags_text
) VALUES (?, ?, ?, ?, ?, ?, ?, ?)
`).run(
row.id,
row.user_id,
row.ticker,
row.title ?? '',
row.summary ?? '',
row.body_markdown ?? '',
row.search_text ?? '',
tagsText
);
}
// Rebuild the entire FTS5 mirror in two bulk statements: wipe the index, then
// repopulate it from every research_artifact row.
function rebuildArtifactSearchIndex() {
const client = getSqliteClient();
client.exec('DELETE FROM `research_artifact_fts`;');
// The CASE expression strips JSON punctuation ([ ] ") from the raw tags
// column so tag words become plain text for FTS. Commas are left in place —
// presumably the FTS tokenizer treats them as separators; TODO confirm the
// tokenizer config if tag search ever misbehaves.
client.exec(`
INSERT INTO research_artifact_fts (
artifact_id,
user_id,
ticker,
title,
summary,
body_markdown,
search_text,
tags_text
)
SELECT
id,
user_id,
ticker,
COALESCE(title, ''),
COALESCE(summary, ''),
COALESCE(body_markdown, ''),
COALESCE(search_text, ''),
CASE
WHEN tags IS NULL OR TRIM(tags) = '' THEN ''
ELSE REPLACE(REPLACE(REPLACE(tags, '[', ''), ']', ''), '\"', '')
END
FROM research_artifact
`);
}
/**
 * Re-order DB rows to follow a given id sequence, dropping ids that have no
 * matching row (e.g. FTS hits whose artifact was deleted mid-query).
 */
function toArtifactOrder(rows: ResearchArtifactRow[], order: number[]) {
  const lookup = new Map(rows.map((row) => [row.id, row] as const));
  const ordered = [];
  for (const id of order) {
    const match = lookup.get(id);
    if (match !== undefined) {
      ordered.push(match);
    }
  }
  return ordered;
}
/**
 * Collect the ids of every artifact referenced as memo evidence for this
 * user + ticker (used to flag artifacts as linked in library listings).
 */
async function listMemoLinkedArtifactIds(userId: string, ticker: string) {
  const links = await db
    .select({ artifactId: researchMemoEvidence.artifact_id })
    .from(researchMemoEvidence)
    .innerJoin(researchMemo, eq(researchMemoEvidence.memo_id, researchMemo.id))
    .where(and(eq(researchMemo.user_id, userId), eq(researchMemo.ticker, ticker)));
  const ids = new Set<number>();
  for (const link of links) {
    ids.add(link.artifactId);
  }
  return ids;
}
/** Fetch all evidence rows for a memo, ordered by section, sort order, then id. */
async function listMemoEvidenceRows(memoId: number) {
  return db
    .select()
    .from(researchMemoEvidence)
    .where(eq(researchMemoEvidence.memo_id, memoId))
    .orderBy(
      asc(researchMemoEvidence.section),
      asc(researchMemoEvidence.sort_order),
      asc(researchMemoEvidence.id)
    );
}
/** Load one artifact scoped to its owner; null when missing or owned by another user. */
async function getArtifactByIdForUser(id: number, userId: string) {
  const rows = await db
    .select()
    .from(researchArtifact)
    .where(and(eq(researchArtifact.id, id), eq(researchArtifact.user_id, userId)))
    .limit(1);
  return rows.length > 0 ? rows[0] : null;
}
/** Load one memo scoped to its owner; null when missing or owned by another user. */
async function getMemoByIdForUser(id: number, userId: string) {
  const rows = await db
    .select()
    .from(researchMemo)
    .where(and(eq(researchMemo.id, id), eq(researchMemo.user_id, userId)))
    .limit(1);
  return rows.length > 0 ? rows[0] : null;
}
/**
 * Insert a new research artifact row and sync its full-text-search mirror.
 *
 * All optional text inputs are trimmed (blank -> null), the ticker is
 * uppercased, and `search_text` falls back to a concatenation of the
 * artifact's text fields when not supplied explicitly. Defaults: source
 * 'user', visibility 'private'. Returns the created artifact in API shape.
 * Throws when the ticker is blank.
 */
export async function createResearchArtifactRecord(input: {
userId: string;
organizationId?: string | null;
ticker: string;
accessionNumber?: string | null;
kind: ResearchArtifactKind;
source?: ResearchArtifactSource;
subtype?: string | null;
title?: string | null;
summary?: string | null;
bodyMarkdown?: string | null;
searchText?: string | null;
visibilityScope?: ResearchVisibilityScope;
tags?: string[];
metadata?: Record<string, unknown> | null;
fileName?: string | null;
mimeType?: string | null;
fileSizeBytes?: number | null;
storagePath?: string | null;
}) {
const ticker = normalizeTicker(input.ticker);
if (!ticker) {
throw new Error('ticker is required');
}
// Same timestamp for created_at and updated_at on insert.
const now = new Date().toISOString();
const title = normalizeOptionalString(input.title);
const summary = normalizeOptionalString(input.summary);
const bodyMarkdown = normalizeOptionalString(input.bodyMarkdown);
const subtype = normalizeOptionalString(input.subtype);
const metadata = normalizeRecord(input.metadata);
const fileName = normalizeOptionalString(input.fileName);
const mimeType = normalizeOptionalString(input.mimeType);
// An explicitly supplied searchText wins; otherwise derive from text fields.
const searchText = normalizeOptionalString(input.searchText)
?? buildArtifactSearchText({
title,
summary,
bodyMarkdown,
fileName,
subtype,
metadata
});
const [created] = await db
.insert(researchArtifact)
.values({
user_id: input.userId,
organization_id: normalizeOptionalString(input.organizationId),
ticker,
accession_number: normalizeOptionalString(input.accessionNumber),
kind: input.kind,
source: input.source ?? 'user',
subtype,
title,
summary,
body_markdown: bodyMarkdown,
search_text: searchText,
visibility_scope: input.visibilityScope ?? 'private',
tags: normalizeTags(input.tags),
metadata,
file_name: fileName,
mime_type: mimeType,
file_size_bytes: normalizePositiveInteger(input.fileSizeBytes),
storage_path: normalizeOptionalString(input.storagePath),
created_at: now,
updated_at: now
})
.returning();
// Keep the FTS index in lockstep with the new row.
syncArtifactSearchIndex(created.id);
return toResearchArtifact(created);
}
/**
 * Create or refresh a system-generated artifact (AI report, filing snapshot,
 * status change) for a user + ticker.
 *
 * Matching: same user, ticker, and kind; additionally same accession number
 * and/or title when those are provided. When accession or title is absent,
 * that dimension is left unconstrained (the `1 = 1` placeholder), and the
 * most recently updated candidate wins. On a match the row's content fields
 * are overwritten wholesale; otherwise a new private 'system' artifact is
 * created. The FTS mirror is re-synced either way.
 */
async function upsertSystemResearchArtifact(input: {
userId: string;
organizationId?: string | null;
ticker: string;
accessionNumber?: string | null;
kind: Extract<ResearchArtifactKind, 'ai_report' | 'filing' | 'status_change'>;
subtype?: string | null;
title?: string | null;
summary?: string | null;
bodyMarkdown?: string | null;
tags?: string[];
metadata?: Record<string, unknown> | null;
}) {
const ticker = normalizeTicker(input.ticker);
const accessionNumber = normalizeOptionalString(input.accessionNumber);
const title = normalizeOptionalString(input.title);
const [existing] = await db
.select()
.from(researchArtifact)
.where(and(
eq(researchArtifact.user_id, input.userId),
eq(researchArtifact.ticker, ticker),
eq(researchArtifact.kind, input.kind),
accessionNumber
? eq(researchArtifact.accession_number, accessionNumber)
: sql`1 = 1`,
title
? eq(researchArtifact.title, title)
: sql`1 = 1`
))
.orderBy(desc(researchArtifact.updated_at))
.limit(1);
if (!existing) {
// No match: create a fresh system-sourced, private artifact.
return await createResearchArtifactRecord({
...input,
ticker,
accessionNumber,
title,
source: 'system',
visibilityScope: 'private'
});
}
// NOTE(review): unlike updateResearchArtifactRecord, fileName is not folded
// into the search text here — system artifact kinds carry no file today.
const searchText = buildArtifactSearchText({
title,
summary: input.summary,
bodyMarkdown: input.bodyMarkdown,
subtype: input.subtype,
metadata: input.metadata
});
const [updated] = await db
.update(researchArtifact)
.set({
organization_id: normalizeOptionalString(input.organizationId) ?? existing.organization_id ?? null,
subtype: normalizeOptionalString(input.subtype),
title,
summary: normalizeOptionalString(input.summary),
body_markdown: normalizeOptionalString(input.bodyMarkdown),
search_text: searchText,
tags: normalizeTags(input.tags),
metadata: normalizeRecord(input.metadata),
updated_at: new Date().toISOString()
})
.where(eq(researchArtifact.id, existing.id))
.returning();
syncArtifactSearchIndex(updated.id);
return toResearchArtifact(updated);
}
export async function updateResearchArtifactRecord(input: {
userId: string;
id: number;
title?: string | null;
summary?: string | null;
bodyMarkdown?: string | null;
tags?: string[];
metadata?: Record<string, unknown> | null;
}) {
const existing = await getArtifactByIdForUser(input.id, input.userId);
if (!existing) {
return null;
}
const title = input.title === undefined ? existing.title : normalizeOptionalString(input.title);
const summary = input.summary === undefined ? existing.summary : normalizeOptionalString(input.summary);
const bodyMarkdown = input.bodyMarkdown === undefined ? existing.body_markdown : normalizeOptionalString(input.bodyMarkdown);
const metadata = input.metadata === undefined ? existing.metadata ?? null : normalizeRecord(input.metadata);
const tags = input.tags === undefined ? normalizeTags(existing.tags ?? []) : normalizeTags(input.tags);
const searchText = buildArtifactSearchText({
title,
summary,
bodyMarkdown,
fileName: existing.file_name,
subtype: existing.subtype,
metadata
});
const [updated] = await db
.update(researchArtifact)
.set({
title,
summary,
body_markdown: bodyMarkdown,
metadata,
tags,
search_text: searchText,
updated_at: new Date().toISOString()
})
.where(eq(researchArtifact.id, input.id))
.returning();
syncArtifactSearchIndex(updated.id);
return toResearchArtifact(updated);
}
/**
 * Delete an artifact owned by the user, clear its FTS mirror row, and
 * best-effort remove any stored upload file. Returns false when the artifact
 * does not exist or belongs to another user.
 */
export async function deleteResearchArtifactRecord(userId: string, id: number) {
  const existing = await getArtifactByIdForUser(id, userId);
  if (existing === null) {
    return false;
  }
  const ownershipFilter = and(eq(researchArtifact.id, id), eq(researchArtifact.user_id, userId));
  await db.delete(researchArtifact).where(ownershipFilter);
  // The artifact row is gone, so this sync only removes the stale FTS entry.
  syncArtifactSearchIndex(id);
  if (existing.storage_path) {
    // File cleanup is best-effort; a missing file must not fail the delete.
    await rm(existing.storage_path, { force: true }).catch(() => undefined);
  }
  return true;
}
/**
 * List a user's research artifacts for a ticker, with optional full-text
 * search (FTS5 MATCH, bm25-ranked), kind/source/tag filters, and memo-link
 * filtering.
 *
 * Returns up to `limit` (clamped to 1..250, default 100) artifacts plus the
 * full set of tags used on this ticker (for filter UIs). A blank ticker
 * yields an empty result. An unparsable FTS query matches nothing instead of
 * throwing.
 */
export async function listResearchArtifacts(userId: string, filters: ArtifactListFilters): Promise<ResearchLibraryResponse> {
  const ticker = normalizeTicker(filters.ticker);
  if (!ticker) {
    return { artifacts: [], availableTags: [] };
  }
  const limit = Math.min(Math.max(Math.trunc(filters.limit ?? 100), 1), 250);
  const client = getSqliteClient();
  const linkedIds = await listMemoLinkedArtifactIds(userId, ticker);
  const normalizedKind = filters.kind ?? null;
  const normalizedTag = normalizeOptionalString(filters.tag);
  const normalizedSource = filters.source ?? null;
  const normalizedQuery = normalizeOptionalString(filters.q);
  const linkedOnly = filters.linkedToMemo ?? null;
  // Over-fetch (3x the page, min 60) so the in-memory filters below can still
  // fill a full page after rows are excluded.
  const fetchLimit = Math.max(limit * 3, 60);
  let artifactRows: ResearchArtifactRow[] = [];
  if (normalizedQuery) {
    let orderedIds: number[] = [];
    try {
      const hits = client.query(`
        SELECT artifact_id AS id
        FROM research_artifact_fts
        WHERE research_artifact_fts MATCH ?
        AND user_id = ?
        AND ticker = ?
        ORDER BY bm25(research_artifact_fts), rowid DESC
        LIMIT ?
      `).all(normalizedQuery, userId, ticker, fetchLimit) as Array<{ id: number }>;
      orderedIds = hits.map((hit) => hit.id);
    } catch {
      // FTS5 MATCH throws on malformed query syntax (unbalanced quotes, bare
      // operators, etc.). Treat an unparsable user query as matching nothing
      // rather than letting the request fail.
      orderedIds = [];
    }
    if (orderedIds.length > 0) {
      // The ids are already scoped to user/ticker by the FTS query above.
      const rows = await db
        .select()
        .from(researchArtifact)
        .where(sql`${researchArtifact.id} in (${sql.join(orderedIds.map((id) => sql`${id}`), sql`, `)})`);
      // Preserve the FTS relevance ordering.
      artifactRows = toArtifactOrder(rows, orderedIds);
    }
  } else {
    artifactRows = await db
      .select()
      .from(researchArtifact)
      .where(and(eq(researchArtifact.user_id, userId), eq(researchArtifact.ticker, ticker)))
      .orderBy(desc(researchArtifact.updated_at), desc(researchArtifact.id))
      .limit(fetchLimit);
  }
  const filteredArtifacts = artifactRows
    .filter((row) => normalizedKind === null || row.kind === normalizedKind)
    .filter((row) => normalizedSource === null || row.source === normalizedSource)
    .filter((row) => normalizedTag === null || normalizeTags(row.tags ?? []).includes(normalizedTag))
    .filter((row) => linkedOnly === null || linkedIds.has(row.id) === linkedOnly)
    .slice(0, limit)
    .map((row) => toResearchArtifact(row, linkedIds.has(row.id)));
  // Collect every tag on this ticker (unfiltered) so the UI can offer filters.
  const allRowsForTags = await db
    .select({ tags: researchArtifact.tags })
    .from(researchArtifact)
    .where(and(eq(researchArtifact.user_id, userId), eq(researchArtifact.ticker, ticker)));
  const tagSet = new Set<string>();
  for (const row of allRowsForTags) {
    for (const tag of normalizeTags(row.tags ?? [])) {
      tagSet.add(tag);
    }
  }
  return {
    artifacts: filteredArtifacts,
    availableTags: [...tagSet].sort((a, b) => a.localeCompare(b))
  };
}
/** Load the single memo for a user + ticker in API shape; null when absent. */
export async function getResearchMemoByTicker(userId: string, ticker: string) {
  const normalizedTicker = normalizeTicker(ticker);
  if (!normalizedTicker) {
    return null;
  }
  const rows = await db
    .select()
    .from(researchMemo)
    .where(and(eq(researchMemo.user_id, userId), eq(researchMemo.ticker, normalizedTicker)))
    .limit(1);
  return rows.length > 0 ? toResearchMemo(rows[0]) : null;
}
/**
 * Create or update the single research memo for a user + ticker.
 *
 * Create path: missing markdown sections default to ''. Update path uses
 * patch semantics — a field left undefined keeps its stored value, while an
 * explicit value (including null) overwrites it; markdown sections are
 * trimmed. Returns the memo in API shape. Throws when the ticker is blank.
 */
export async function upsertResearchMemoRecord(input: {
userId: string;
organizationId?: string | null;
ticker: string;
rating?: ResearchMemoRating | null;
conviction?: ResearchMemoConviction | null;
timeHorizonMonths?: number | null;
packetTitle?: string | null;
packetSubtitle?: string | null;
thesisMarkdown?: string;
variantViewMarkdown?: string;
catalystsMarkdown?: string;
risksMarkdown?: string;
disconfirmingEvidenceMarkdown?: string;
nextActionsMarkdown?: string;
}) {
const ticker = normalizeTicker(input.ticker);
if (!ticker) {
throw new Error('ticker is required');
}
const now = new Date().toISOString();
const existing = await getResearchMemoByTicker(input.userId, ticker);
if (!existing) {
// First memo for this ticker: create with defaults for absent fields.
const [created] = await db
.insert(researchMemo)
.values({
user_id: input.userId,
organization_id: normalizeOptionalString(input.organizationId),
ticker,
rating: input.rating ?? null,
conviction: input.conviction ?? null,
time_horizon_months: normalizePositiveInteger(input.timeHorizonMonths),
packet_title: normalizeOptionalString(input.packetTitle),
packet_subtitle: normalizeOptionalString(input.packetSubtitle),
thesis_markdown: input.thesisMarkdown?.trim() ?? '',
variant_view_markdown: input.variantViewMarkdown?.trim() ?? '',
catalysts_markdown: input.catalystsMarkdown?.trim() ?? '',
risks_markdown: input.risksMarkdown?.trim() ?? '',
disconfirming_evidence_markdown: input.disconfirmingEvidenceMarkdown?.trim() ?? '',
next_actions_markdown: input.nextActionsMarkdown?.trim() ?? '',
created_at: now,
updated_at: now
})
.returning();
return toResearchMemo(created);
}
// Patch: undefined keeps the stored value; explicit values overwrite.
const [updated] = await db
.update(researchMemo)
.set({
organization_id: normalizeOptionalString(input.organizationId) ?? existing.organization_id,
rating: input.rating === undefined ? existing.rating : input.rating,
conviction: input.conviction === undefined ? existing.conviction : input.conviction,
time_horizon_months: input.timeHorizonMonths === undefined
? existing.time_horizon_months
: normalizePositiveInteger(input.timeHorizonMonths),
packet_title: input.packetTitle === undefined ? existing.packet_title : normalizeOptionalString(input.packetTitle),
packet_subtitle: input.packetSubtitle === undefined ? existing.packet_subtitle : normalizeOptionalString(input.packetSubtitle),
thesis_markdown: input.thesisMarkdown === undefined ? existing.thesis_markdown : input.thesisMarkdown.trim(),
variant_view_markdown: input.variantViewMarkdown === undefined ? existing.variant_view_markdown : input.variantViewMarkdown.trim(),
catalysts_markdown: input.catalystsMarkdown === undefined ? existing.catalysts_markdown : input.catalystsMarkdown.trim(),
risks_markdown: input.risksMarkdown === undefined ? existing.risks_markdown : input.risksMarkdown.trim(),
disconfirming_evidence_markdown: input.disconfirmingEvidenceMarkdown === undefined ? existing.disconfirming_evidence_markdown : input.disconfirmingEvidenceMarkdown.trim(),
next_actions_markdown: input.nextActionsMarkdown === undefined ? existing.next_actions_markdown : input.nextActionsMarkdown.trim(),
updated_at: now
})
.where(and(eq(researchMemo.user_id, input.userId), eq(researchMemo.ticker, ticker)))
.returning();
return toResearchMemo(updated);
}
/**
 * Attach (or re-annotate) an artifact as evidence on one section of a memo.
 *
 * Validates that the memo and artifact both belong to the user and share the
 * same ticker. A link is unique per (memo, artifact, section): an existing
 * link is updated in place, otherwise a new one is appended after the
 * section's current max sort order. Returns the memo's full evidence list.
 * Throws when the memo or artifact is missing, or tickers differ.
 */
export async function addResearchMemoEvidenceLink(input: {
userId: string;
memoId: number;
artifactId: number;
section: ResearchMemoSection;
annotation?: string | null;
sortOrder?: number | null;
}) {
const memo = await getMemoByIdForUser(input.memoId, input.userId);
if (!memo) {
throw new Error('Research memo not found');
}
const artifact = await getArtifactByIdForUser(input.artifactId, input.userId);
if (!artifact) {
throw new Error('Research artifact not found');
}
if (artifact.ticker !== memo.ticker) {
throw new Error('Memo evidence must reference the same ticker');
}
// A (memo, artifact, section) triple can hold at most one link.
const existing = await db
.select()
.from(researchMemoEvidence)
.where(and(
eq(researchMemoEvidence.memo_id, input.memoId),
eq(researchMemoEvidence.artifact_id, input.artifactId),
eq(researchMemoEvidence.section, input.section)
))
.limit(1);
// No explicit sort order -> append after the section's current maximum.
const desiredSortOrder = input.sortOrder === undefined || input.sortOrder === null
? await db
.select({ maxOrder: sql<number>`coalesce(max(${researchMemoEvidence.sort_order}), 0)` })
.from(researchMemoEvidence)
.where(and(eq(researchMemoEvidence.memo_id, input.memoId), eq(researchMemoEvidence.section, input.section)))
.then((rows) => (rows[0]?.maxOrder ?? 0) + 1)
: Math.max(0, Math.trunc(input.sortOrder));
// `now` is only stamped on fresh inserts; updates keep their created_at.
const now = new Date().toISOString();
if (existing.length > 0) {
await db
.update(researchMemoEvidence)
.set({
annotation: normalizeOptionalString(input.annotation),
sort_order: desiredSortOrder
})
.where(eq(researchMemoEvidence.id, existing[0].id));
} else {
await db
.insert(researchMemoEvidence)
.values({
memo_id: input.memoId,
artifact_id: input.artifactId,
section: input.section,
annotation: normalizeOptionalString(input.annotation),
sort_order: desiredSortOrder,
created_at: now
});
}
// Re-read the full evidence list so the caller sees the final ordering.
const memoEntity = toResearchMemo(memo);
const evidence = await listResearchMemoEvidenceLinks(input.userId, memoEntity.ticker);
return evidence.filter((item) => item.memo_id === input.memoId);
}
/**
 * Remove one evidence link from a memo the user owns.
 * Returns true only when a row was actually deleted.
 */
export async function deleteResearchMemoEvidenceLink(userId: string, memoId: number, linkId: number) {
  const memo = await getMemoByIdForUser(memoId, userId);
  if (memo === null) {
    return false;
  }
  const deleted = await db
    .delete(researchMemoEvidence)
    .where(and(eq(researchMemoEvidence.id, linkId), eq(researchMemoEvidence.memo_id, memoId)))
    .returning({ id: researchMemoEvidence.id });
  return deleted.length > 0;
}
/**
 * Load the memo evidence links for a user + ticker, each joined with its
 * artifact in API shape (flagged linked_to_memo = true). Links whose artifact
 * row has vanished are silently dropped. Returns [] when there is no memo or
 * no links.
 */
async function listResearchMemoEvidenceLinks(userId: string, ticker: string): Promise<ResearchMemoEvidenceLink[]> {
const memo = await getResearchMemoByTicker(userId, ticker);
if (!memo) {
return [];
}
const evidenceRows = await listMemoEvidenceRows(memo.id);
if (evidenceRows.length === 0) {
return [];
}
// Batch-load all referenced artifacts in one query.
const artifactIds = evidenceRows.map((row) => row.artifact_id);
const artifactRows = await db
.select()
.from(researchArtifact)
.where(sql`${researchArtifact.id} in (${sql.join(artifactIds.map((id) => sql`${id}`), sql`, `)})`);
const artifactMap = new Map(artifactRows.map((row) => [row.id, row]));
return evidenceRows
.map((row) => {
const artifact = artifactMap.get(row.artifact_id);
if (!artifact) {
// Dangling link (artifact deleted): drop it from the result.
return null;
}
return {
id: row.id,
memo_id: row.memo_id,
artifact_id: row.artifact_id,
section: row.section,
annotation: row.annotation ?? null,
sort_order: row.sort_order,
created_at: row.created_at,
artifact: toResearchArtifact(artifact, true)
} satisfies ResearchMemoEvidenceLink;
})
.filter((row): row is ResearchMemoEvidenceLink => Boolean(row));
}
/**
 * Build the read-only research packet for a ticker: memo sections with their
 * evidence links, plus a company-name fallback chain (watchlist coverage,
 * then the latest filing).
 */
export async function getResearchPacket(userId: string, ticker: string): Promise<ResearchPacket> {
  const normalizedTicker = normalizeTicker(ticker);
  const memo = await getResearchMemoByTicker(userId, normalizedTicker);
  const evidence = await listResearchMemoEvidenceLinks(userId, normalizedTicker);
  const coverage = await getWatchlistItemByTicker(userId, normalizedTicker);
  const filings = await listFilingsRecords({ ticker: normalizedTicker, limit: 1 });
  const latestFiling = filings[0] ?? null;
  // Group evidence links under their memo section.
  const evidenceBySection = new Map<ResearchMemoSection, ResearchMemoEvidenceLink[]>();
  for (const link of evidence) {
    const bucket = evidenceBySection.get(link.section);
    if (bucket) {
      bucket.push(link);
    } else {
      evidenceBySection.set(link.section, [link]);
    }
  }
  return {
    ticker: normalizedTicker,
    companyName: coverage?.company_name ?? latestFiling?.company_name ?? null,
    generated_at: new Date().toISOString(),
    memo,
    sections: toPacketSections(memo, evidenceBySection)
  };
}
/**
 * Aggregate everything the research workspace screen needs for one ticker,
 * fetched in parallel: coverage row, memo, library listing, packet, and the
 * latest filing (for the company-name/date fallbacks).
 */
export async function getResearchWorkspace(userId: string, ticker: string): Promise<ResearchWorkspace> {
  const normalizedTicker = normalizeTicker(ticker);
  const coveragePromise = getWatchlistItemByTicker(userId, normalizedTicker);
  const memoPromise = getResearchMemoByTicker(userId, normalizedTicker);
  const libraryPromise = listResearchArtifacts(userId, { ticker: normalizedTicker, limit: 40 });
  const packetPromise = getResearchPacket(userId, normalizedTicker);
  const filingsPromise = listFilingsRecords({ ticker: normalizedTicker, limit: 1 });
  const [coverage, memo, library, packet, filings] = await Promise.all([
    coveragePromise,
    memoPromise,
    libraryPromise,
    packetPromise,
    filingsPromise
  ]);
  const newestFiling = filings[0] ?? null;
  return {
    ticker: normalizedTicker,
    companyName: coverage?.company_name ?? newestFiling?.company_name ?? null,
    coverage,
    latestFilingDate: newestFiling?.filing_date ?? null,
    memo,
    library: library.artifacts,
    packet,
    availableTags: library.availableTags
  };
}
/**
 * Legacy journal API shim: list a ticker's artifacts and project the ones
 * that have a journal representation, capped at `limit` (clamped 1..250 for
 * the underlying fetch).
 */
export async function listResearchJournalEntriesCompat(userId: string, ticker: string, limit = 100) {
  const clamped = Math.min(Math.max(Math.trunc(limit), 1), 250);
  const { artifacts } = await listResearchArtifacts(userId, { ticker, limit: clamped });
  const entries: ResearchJournalEntry[] = [];
  for (const artifact of artifacts) {
    const entry = toResearchJournalEntry(artifact);
    if (entry) {
      entries.push(entry);
    }
  }
  return entries.slice(0, limit);
}
/**
 * Legacy journal API shim: persist a journal entry as a research artifact.
 * Status changes become system-sourced `status_change` artifacts; everything
 * else becomes a user `note` subtyped by the original entry type.
 */
export async function createResearchJournalEntryCompat(input: {
  userId: string;
  ticker: string;
  accessionNumber?: string | null;
  entryType: ResearchJournalEntryType;
  title?: string | null;
  bodyMarkdown: string;
  metadata?: Record<string, unknown> | null;
}) {
  const isStatusChange = input.entryType === 'status_change';
  const created = await createResearchArtifactRecord({
    userId: input.userId,
    ticker: input.ticker,
    accessionNumber: input.accessionNumber,
    kind: isStatusChange ? 'status_change' : 'note',
    source: isStatusChange ? 'system' : 'user',
    subtype: input.entryType,
    title: input.title,
    // Journal entries only carry one body; mirror it into summary for previews.
    summary: input.bodyMarkdown,
    bodyMarkdown: input.bodyMarkdown,
    metadata: input.metadata
  });
  return toResearchJournalEntry(created);
}
/** Legacy journal API shim: patch the backing artifact and return the journal view. */
export async function updateResearchJournalEntryCompat(input: {
  userId: string;
  id: number;
  title?: string | null;
  bodyMarkdown?: string;
  metadata?: Record<string, unknown> | null;
}) {
  const artifact = await updateResearchArtifactRecord({
    userId: input.userId,
    id: input.id,
    title: input.title,
    bodyMarkdown: input.bodyMarkdown,
    metadata: input.metadata
  });
  if (!artifact) {
    return null;
  }
  return toResearchJournalEntry(artifact);
}
/** Legacy journal API shim: journal entries are artifacts, so deletion delegates. */
export async function deleteResearchJournalEntryCompat(userId: string, id: number) {
  return deleteResearchArtifactRecord(userId, id);
}
/**
 * Capture a filing's stored AI analysis as an `ai_report` artifact on the
 * filing's ticker (upserted, so re-runs refresh the same artifact).
 * @throws Error when the filing or its AI summary is missing.
 */
export async function createAiReportArtifactFromAccession(userId: string, accessionNumber: string) {
  const filing = await getFilingByAccession(accessionNumber);
  if (!filing) {
    throw new Error('Filing not found');
  }
  const summary = filing.analysis?.text ?? filing.analysis?.legacyInsights ?? '';
  if (!summary) {
    throw new Error('AI summary not found');
  }
  const bodyLines = [
    `Stored AI memo for ${filing.company_name} (${filing.ticker}).`,
    `Accession: ${filing.accession_number}`,
    '',
    summary
  ];
  return upsertSystemResearchArtifact({
    userId,
    ticker: filing.ticker,
    accessionNumber: filing.accession_number,
    kind: 'ai_report',
    subtype: 'filing_analysis',
    title: `${filing.filing_type} AI memo`,
    summary,
    bodyMarkdown: bodyLines.join('\n'),
    metadata: {
      provider: filing.analysis?.provider ?? 'unknown',
      model: filing.analysis?.model ?? 'unknown',
      filingType: filing.filing_type,
      filingDate: filing.filing_date
    }
  });
}
/**
 * Capture a filing snapshot as a `filing` artifact on the filing's ticker
 * (upserted, so re-runs refresh the same artifact). Uses the stored AI
 * analysis text when available, with static fallbacks otherwise.
 * @throws Error when the filing is missing.
 */
export async function createFilingArtifactFromAccession(userId: string, accessionNumber: string) {
  const filing = await getFilingByAccession(accessionNumber);
  if (!filing) {
    throw new Error('Filing not found');
  }
  const analysisText = filing.analysis?.text ?? filing.analysis?.legacyInsights ?? null;
  const bodyLines = [
    `Captured filing note for ${filing.company_name} (${filing.ticker}).`,
    `Filed: ${filing.filing_date}`,
    `Accession: ${filing.accession_number}`,
    '',
    analysisText ?? 'Review this filing for thesis updates.'
  ];
  return upsertSystemResearchArtifact({
    userId,
    ticker: filing.ticker,
    accessionNumber: filing.accession_number,
    kind: 'filing',
    subtype: 'filing_snapshot',
    title: `${filing.filing_type} filing snapshot`,
    summary: analysisText ?? `Captured filing ${filing.accession_number}.`,
    bodyMarkdown: bodyLines.join('\n'),
    metadata: {
      filingType: filing.filing_type,
      filingDate: filing.filing_date,
      filingUrl: filing.filing_url,
      submissionUrl: filing.submission_url ?? null,
      primaryDocument: filing.primary_document ?? null
    }
  });
}
/**
 * Persist an uploaded file under data/research-uploads/<user>/<ticker>/ and
 * register it as an `upload` artifact (title defaults to the slugged file
 * name; the original name and relative path are kept in metadata).
 * @throws Error when the ticker is blank or the file is missing/empty.
 */
export async function storeResearchUpload(input: {
  userId: string;
  ticker: string;
  file: File;
  title?: string | null;
  summary?: string | null;
  tags?: string[];
  metadata?: Record<string, unknown> | null;
}) {
  const ticker = normalizeTicker(input.ticker);
  if (!ticker) {
    throw new Error('ticker is required');
  }
  if (!input.file || input.file.size === 0) {
    throw new Error('file is required');
  }
  const originalName = input.file.name || '';
  // Cap the extension length so a hostile name cannot bloat the path.
  const extension = extname(originalName).slice(0, 16);
  const fileName = safeFileName(originalName || `upload${extension}`);
  // Timestamp + UUID prefix guarantees uniqueness within the user/ticker dir.
  const relativePath = join(input.userId, ticker, `${Date.now()}-${randomUUID()}-${fileName}`);
  const storagePath = join(getUploadsRoot(), relativePath);
  await mkdir(dirname(storagePath), { recursive: true });
  const bytes = Buffer.from(await input.file.arrayBuffer());
  await writeFile(storagePath, bytes);
  return createResearchArtifactRecord({
    userId: input.userId,
    ticker,
    kind: 'upload',
    source: 'user',
    subtype: normalizeOptionalString(input.file.type) ?? 'upload',
    title: input.title ?? fileName,
    summary: input.summary,
    bodyMarkdown: null,
    tags: input.tags,
    metadata: {
      ...(normalizeRecord(input.metadata) ?? {}),
      originalFileName: input.file.name,
      relativePath
    },
    fileName,
    mimeType: input.file.type || 'application/octet-stream',
    fileSizeBytes: input.file.size,
    storagePath
  });
}
/**
 * Serve a stored upload's bytes as an attachment Response. Returns null when
 * the artifact is missing, not owned by the user, not an upload, has no
 * storage path, or the file cannot be read.
 */
export async function getResearchArtifactFileResponse(userId: string, id: number) {
  const artifact = await getArtifactByIdForUser(id, userId);
  if (!artifact || !artifact.storage_path || artifact.kind !== 'upload') {
    return null;
  }
  const bytes = await readFile(artifact.storage_path).catch(() => null);
  if (!bytes) {
    return null;
  }
  // Harden the Content-Disposition header: strip quotes, backslashes, and
  // CR/LF so a stored name cannot break out of the quoted filename token.
  // Upload names are slugged by safeFileName, but artifacts created via
  // createResearchArtifactRecord may carry arbitrary file_name values.
  const rawName = artifact.file_name ?? `research-upload-${artifact.id}`;
  const headerSafeName = rawName.replace(/["\\\r\n]/g, '_');
  return new Response(bytes, {
    headers: {
      'content-type': artifact.mime_type ?? 'application/octet-stream',
      'content-disposition': `attachment; filename="${headerSafeName}"`,
      'cache-control': 'no-store'
    }
  });
}
// Maintenance entry point: drop and repopulate the entire FTS mirror from the
// research_artifact table. Not referenced anywhere in this file — presumably
// invoked from a migration or admin script; TODO confirm before removing.
function rebuildResearchArtifactIndex() {
rebuildArtifactSearchIndex();
}