Implement fiscal-style research MVP flows
Some checks failed
PR Checks / typecheck-and-build (push) Has been cancelled
Some checks failed
PR Checks / typecheck-and-build (push) Has been cancelled
This commit is contained in:
109
lib/api.ts
109
lib/api.ts
@@ -4,12 +4,16 @@ import type {
|
||||
CompanyAiReportDetail,
|
||||
CompanyAnalysis,
|
||||
CompanyFinancialStatementsResponse,
|
||||
CoveragePriority,
|
||||
CoverageStatus,
|
||||
Filing,
|
||||
Holding,
|
||||
FinancialHistoryWindow,
|
||||
FinancialStatementKind,
|
||||
PortfolioInsight,
|
||||
PortfolioSummary,
|
||||
ResearchJournalEntry,
|
||||
ResearchJournalEntryType,
|
||||
Task,
|
||||
TaskStatus,
|
||||
TaskTimeline,
|
||||
@@ -99,6 +103,36 @@ async function unwrapData<T>(result: TreatyResult, fallback: string) {
|
||||
return payload as T;
|
||||
}
|
||||
|
||||
async function requestJson<T>(input: {
|
||||
path: string;
|
||||
method?: 'GET' | 'POST' | 'PATCH' | 'DELETE';
|
||||
body?: unknown;
|
||||
}, fallback: string) {
|
||||
const response = await fetch(`${API_BASE}${input.path}`, {
|
||||
method: input.method ?? 'GET',
|
||||
credentials: 'include',
|
||||
cache: 'no-store',
|
||||
headers: input.body === undefined ? undefined : {
|
||||
'content-type': 'application/json'
|
||||
},
|
||||
body: input.body === undefined ? undefined : JSON.stringify(input.body)
|
||||
});
|
||||
|
||||
const payload = await response.json().catch(() => null);
|
||||
if (!response.ok) {
|
||||
throw new ApiError(
|
||||
extractErrorMessage({ value: payload }, fallback),
|
||||
response.status
|
||||
);
|
||||
}
|
||||
|
||||
if (payload === null || payload === undefined) {
|
||||
throw new ApiError(fallback, response.status);
|
||||
}
|
||||
|
||||
return payload as T;
|
||||
}
|
||||
|
||||
export async function getMe() {
|
||||
const result = await client.api.me.get();
|
||||
return await unwrapData<{ user: User }>(result, 'Unable to fetch session');
|
||||
@@ -115,16 +149,80 @@ export async function upsertWatchlistItem(input: {
|
||||
sector?: string;
|
||||
category?: string;
|
||||
tags?: string[];
|
||||
status?: CoverageStatus;
|
||||
priority?: CoveragePriority;
|
||||
lastReviewedAt?: string;
|
||||
}) {
|
||||
const result = await client.api.watchlist.post(input);
|
||||
return await unwrapData<{ item: WatchlistItem }>(result, 'Unable to save watchlist item');
|
||||
}
|
||||
|
||||
export async function updateWatchlistItem(id: number, input: {
|
||||
companyName?: string;
|
||||
sector?: string;
|
||||
category?: string;
|
||||
tags?: string[];
|
||||
status?: CoverageStatus;
|
||||
priority?: CoveragePriority;
|
||||
lastReviewedAt?: string;
|
||||
}) {
|
||||
return await requestJson<{ item: WatchlistItem; statusChangeJournalCreated: boolean }>({
|
||||
path: `/api/watchlist/${id}`,
|
||||
method: 'PATCH',
|
||||
body: input
|
||||
}, 'Unable to update watchlist item');
|
||||
}
|
||||
|
||||
export async function deleteWatchlistItem(id: number) {
|
||||
const result = await client.api.watchlist[id].delete();
|
||||
return await unwrapData<{ success: boolean }>(result, 'Unable to delete watchlist item');
|
||||
}
|
||||
|
||||
export async function listResearchJournal(ticker: string) {
|
||||
const result = await client.api.research.journal.get({
|
||||
$query: {
|
||||
ticker: ticker.trim().toUpperCase()
|
||||
}
|
||||
});
|
||||
|
||||
return await unwrapData<{ entries: ResearchJournalEntry[] }>(result, 'Unable to fetch research journal');
|
||||
}
|
||||
|
||||
export async function createResearchJournalEntry(input: {
|
||||
ticker: string;
|
||||
accessionNumber?: string;
|
||||
entryType: ResearchJournalEntryType;
|
||||
title?: string;
|
||||
bodyMarkdown: string;
|
||||
metadata?: Record<string, unknown>;
|
||||
}) {
|
||||
return await requestJson<{ entry: ResearchJournalEntry }>({
|
||||
path: '/api/research/journal',
|
||||
method: 'POST',
|
||||
body: {
|
||||
...input,
|
||||
ticker: input.ticker.trim().toUpperCase()
|
||||
}
|
||||
}, 'Unable to create journal entry');
|
||||
}
|
||||
|
||||
export async function updateResearchJournalEntry(id: number, input: {
|
||||
title?: string;
|
||||
bodyMarkdown?: string;
|
||||
metadata?: Record<string, unknown>;
|
||||
}) {
|
||||
return await requestJson<{ entry: ResearchJournalEntry }>({
|
||||
path: `/api/research/journal/${id}`,
|
||||
method: 'PATCH',
|
||||
body: input
|
||||
}, 'Unable to update journal entry');
|
||||
}
|
||||
|
||||
export async function deleteResearchJournalEntry(id: number) {
|
||||
const result = await client.api.research.journal[id].delete();
|
||||
return await unwrapData<{ success: boolean }>(result, 'Unable to delete journal entry');
|
||||
}
|
||||
|
||||
export async function listHoldings() {
|
||||
const result = await client.api.portfolio.holdings.get();
|
||||
return await unwrapData<{ holdings: Holding[] }>(result, 'Unable to fetch holdings');
|
||||
@@ -140,11 +238,22 @@ export async function upsertHolding(input: {
|
||||
shares: number;
|
||||
avgCost: number;
|
||||
currentPrice?: number;
|
||||
companyName?: string;
|
||||
}) {
|
||||
const result = await client.api.portfolio.holdings.post(input);
|
||||
return await unwrapData<{ holding: Holding }>(result, 'Unable to save holding');
|
||||
}
|
||||
|
||||
export async function updateHolding(id: number, input: {
|
||||
shares?: number;
|
||||
avgCost?: number;
|
||||
currentPrice?: number;
|
||||
companyName?: string;
|
||||
}) {
|
||||
const result = await client.api.portfolio.holdings[id].patch(input);
|
||||
return await unwrapData<{ holding: Holding }>(result, 'Unable to update holding');
|
||||
}
|
||||
|
||||
export async function deleteHolding(id: number) {
|
||||
const result = await client.api.portfolio.holdings[id].delete();
|
||||
return await unwrapData<{ success: boolean }>(result, 'Unable to delete holding');
|
||||
|
||||
@@ -14,6 +14,7 @@ export const queryKeys = {
|
||||
filings: (ticker: string | null, limit: number) => ['filings', ticker ?? '', limit] as const,
|
||||
report: (accessionNumber: string) => ['report', accessionNumber] as const,
|
||||
watchlist: () => ['watchlist'] as const,
|
||||
researchJournal: (ticker: string) => ['research', 'journal', ticker] as const,
|
||||
holdings: () => ['portfolio', 'holdings'] as const,
|
||||
portfolioSummary: () => ['portfolio', 'summary'] as const,
|
||||
latestPortfolioInsight: () => ['portfolio', 'insights', 'latest'] as const,
|
||||
|
||||
@@ -10,6 +10,7 @@ import {
|
||||
listFilings,
|
||||
listHoldings,
|
||||
listRecentTasks,
|
||||
listResearchJournal,
|
||||
listWatchlist
|
||||
} from '@/lib/api';
|
||||
import { queryKeys } from '@/lib/query/keys';
|
||||
@@ -103,6 +104,16 @@ export function watchlistQueryOptions() {
|
||||
});
|
||||
}
|
||||
|
||||
export function researchJournalQueryOptions(ticker: string) {
|
||||
const normalizedTicker = ticker.trim().toUpperCase();
|
||||
|
||||
return queryOptions({
|
||||
queryKey: queryKeys.researchJournal(normalizedTicker),
|
||||
queryFn: () => listResearchJournal(normalizedTicker),
|
||||
staleTime: 15_000
|
||||
});
|
||||
}
|
||||
|
||||
export function holdingsQueryOptions() {
|
||||
return queryOptions({
|
||||
queryKey: queryKeys.holdings(),
|
||||
|
||||
@@ -1,9 +1,12 @@
|
||||
import { Elysia, t } from 'elysia';
|
||||
import { getWorld } from 'workflow/runtime';
|
||||
import type {
|
||||
CoveragePriority,
|
||||
CoverageStatus,
|
||||
Filing,
|
||||
FinancialHistoryWindow,
|
||||
FinancialStatementKind,
|
||||
ResearchJournalEntryType,
|
||||
TaskStatus
|
||||
} from '@/lib/types';
|
||||
import { auth } from '@/lib/auth';
|
||||
@@ -15,18 +18,32 @@ import {
|
||||
getCompanyFinancialTaxonomy
|
||||
} from '@/lib/server/financial-taxonomy';
|
||||
import { redactInternalFilingAnalysisFields } from '@/lib/server/api/filing-redaction';
|
||||
import { getFilingByAccession, listFilingsRecords } from '@/lib/server/repos/filings';
|
||||
import {
|
||||
getFilingByAccession,
|
||||
listFilingsRecords,
|
||||
listLatestFilingDatesByTickers
|
||||
} from '@/lib/server/repos/filings';
|
||||
import {
|
||||
deleteHoldingByIdRecord,
|
||||
getHoldingByTicker,
|
||||
listUserHoldings,
|
||||
updateHoldingByIdRecord,
|
||||
upsertHoldingRecord
|
||||
} from '@/lib/server/repos/holdings';
|
||||
import { getLatestPortfolioInsight } from '@/lib/server/repos/insights';
|
||||
import {
|
||||
createResearchJournalEntryRecord,
|
||||
deleteResearchJournalEntryRecord,
|
||||
listResearchJournalEntries,
|
||||
updateResearchJournalEntryRecord
|
||||
} from '@/lib/server/repos/research-journal';
|
||||
import {
|
||||
deleteWatchlistItemRecord,
|
||||
getWatchlistItemById,
|
||||
getWatchlistItemByTicker,
|
||||
listWatchlistItems,
|
||||
updateWatchlistItemRecord,
|
||||
updateWatchlistReviewByTicker,
|
||||
upsertWatchlistItemRecord
|
||||
} from '@/lib/server/repos/watchlist';
|
||||
import { getPriceHistory, getQuote } from '@/lib/server/prices';
|
||||
@@ -52,6 +69,9 @@ const FINANCIAL_STATEMENT_KINDS: FinancialStatementKind[] = [
|
||||
'comprehensive_income'
|
||||
];
|
||||
const FINANCIAL_HISTORY_WINDOWS: FinancialHistoryWindow[] = ['10y', 'all'];
|
||||
const COVERAGE_STATUSES: CoverageStatus[] = ['backlog', 'active', 'watch', 'archive'];
|
||||
const COVERAGE_PRIORITIES: CoveragePriority[] = ['low', 'medium', 'high'];
|
||||
const JOURNAL_ENTRY_TYPES: ResearchJournalEntryType[] = ['note', 'filing_note', 'status_change'];
|
||||
|
||||
function asRecord(value: unknown): Record<string, unknown> {
|
||||
if (!value || typeof value !== 'object' || Array.isArray(value)) {
|
||||
@@ -94,6 +114,14 @@ function asOptionalString(value: unknown) {
|
||||
return normalized.length > 0 ? normalized : null;
|
||||
}
|
||||
|
||||
function asOptionalRecord(value: unknown) {
|
||||
if (!value || typeof value !== 'object' || Array.isArray(value)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return value as Record<string, unknown>;
|
||||
}
|
||||
|
||||
function asTags(value: unknown) {
|
||||
const source = Array.isArray(value)
|
||||
? value
|
||||
@@ -130,6 +158,31 @@ function asHistoryWindow(value: unknown): FinancialHistoryWindow {
|
||||
: '10y';
|
||||
}
|
||||
|
||||
function asCoverageStatus(value: unknown) {
|
||||
return COVERAGE_STATUSES.includes(value as CoverageStatus)
|
||||
? value as CoverageStatus
|
||||
: undefined;
|
||||
}
|
||||
|
||||
function asCoveragePriority(value: unknown) {
|
||||
return COVERAGE_PRIORITIES.includes(value as CoveragePriority)
|
||||
? value as CoveragePriority
|
||||
: undefined;
|
||||
}
|
||||
|
||||
function asJournalEntryType(value: unknown) {
|
||||
return JOURNAL_ENTRY_TYPES.includes(value as ResearchJournalEntryType)
|
||||
? value as ResearchJournalEntryType
|
||||
: undefined;
|
||||
}
|
||||
|
||||
function formatLabel(value: string) {
|
||||
return value
|
||||
.split('_')
|
||||
.map((part) => part.charAt(0).toUpperCase() + part.slice(1))
|
||||
.join(' ');
|
||||
}
|
||||
|
||||
function withFinancialMetricsPolicy(filing: Filing): Filing {
|
||||
if (FINANCIAL_FORMS.has(filing.filing_type)) {
|
||||
return filing;
|
||||
@@ -266,7 +319,14 @@ export const app = new Elysia({ prefix: '/api' })
|
||||
}
|
||||
|
||||
const items = await listWatchlistItems(session.user.id);
|
||||
return Response.json({ items });
|
||||
const latestFilingDates = await listLatestFilingDatesByTickers(items.map((item) => item.ticker));
|
||||
|
||||
return Response.json({
|
||||
items: items.map((item) => ({
|
||||
...item,
|
||||
latest_filing_date: latestFilingDates.get(item.ticker) ?? null
|
||||
}))
|
||||
});
|
||||
})
|
||||
.post('/watchlist', async ({ body }) => {
|
||||
const { session, response } = await requireAuthenticatedSession();
|
||||
@@ -280,6 +340,9 @@ export const app = new Elysia({ prefix: '/api' })
|
||||
const sector = asOptionalString(payload.sector) ?? '';
|
||||
const category = asOptionalString(payload.category) ?? '';
|
||||
const tags = asTags(payload.tags);
|
||||
const status = asCoverageStatus(payload.status);
|
||||
const priority = asCoveragePriority(payload.priority);
|
||||
const lastReviewedAt = asOptionalString(payload.lastReviewedAt);
|
||||
|
||||
if (!ticker) {
|
||||
return jsonError('ticker is required');
|
||||
@@ -296,7 +359,10 @@ export const app = new Elysia({ prefix: '/api' })
|
||||
companyName,
|
||||
sector,
|
||||
category,
|
||||
tags
|
||||
tags,
|
||||
status,
|
||||
priority,
|
||||
lastReviewedAt
|
||||
});
|
||||
|
||||
const autoFilingSyncQueued = created
|
||||
@@ -316,9 +382,94 @@ export const app = new Elysia({ prefix: '/api' })
|
||||
companyName: t.String({ minLength: 1 }),
|
||||
sector: t.Optional(t.String()),
|
||||
category: t.Optional(t.String()),
|
||||
tags: t.Optional(t.Union([t.Array(t.String()), t.String()]))
|
||||
tags: t.Optional(t.Union([t.Array(t.String()), t.String()])),
|
||||
status: t.Optional(t.Union([
|
||||
t.Literal('backlog'),
|
||||
t.Literal('active'),
|
||||
t.Literal('watch'),
|
||||
t.Literal('archive')
|
||||
])),
|
||||
priority: t.Optional(t.Union([
|
||||
t.Literal('low'),
|
||||
t.Literal('medium'),
|
||||
t.Literal('high')
|
||||
])),
|
||||
lastReviewedAt: t.Optional(t.String())
|
||||
})
|
||||
})
|
||||
.patch('/watchlist/:id', async ({ params, body }) => {
|
||||
const { session, response } = await requireAuthenticatedSession();
|
||||
if (response) {
|
||||
return response;
|
||||
}
|
||||
|
||||
const numericId = Number(params.id);
|
||||
if (!Number.isInteger(numericId) || numericId <= 0) {
|
||||
return jsonError('Invalid watchlist id', 400);
|
||||
}
|
||||
|
||||
const existing = await getWatchlistItemById(session.user.id, numericId);
|
||||
if (!existing) {
|
||||
return jsonError('Watchlist item not found', 404);
|
||||
}
|
||||
|
||||
const payload = asRecord(body);
|
||||
const nextStatus = payload.status === undefined
|
||||
? existing.status
|
||||
: asCoverageStatus(payload.status);
|
||||
const nextPriority = payload.priority === undefined
|
||||
? existing.priority
|
||||
: asCoveragePriority(payload.priority);
|
||||
|
||||
if (payload.status !== undefined && !nextStatus) {
|
||||
return jsonError('Invalid coverage status', 400);
|
||||
}
|
||||
|
||||
if (payload.priority !== undefined && !nextPriority) {
|
||||
return jsonError('Invalid coverage priority', 400);
|
||||
}
|
||||
|
||||
try {
|
||||
const item = await updateWatchlistItemRecord({
|
||||
userId: session.user.id,
|
||||
id: numericId,
|
||||
companyName: payload.companyName === undefined ? undefined : (typeof payload.companyName === 'string' ? payload.companyName : ''),
|
||||
sector: payload.sector === undefined ? undefined : (typeof payload.sector === 'string' ? payload.sector : ''),
|
||||
category: payload.category === undefined ? undefined : (typeof payload.category === 'string' ? payload.category : ''),
|
||||
tags: payload.tags === undefined ? undefined : asTags(payload.tags),
|
||||
status: nextStatus,
|
||||
priority: nextPriority,
|
||||
lastReviewedAt: payload.lastReviewedAt === undefined ? undefined : asOptionalString(payload.lastReviewedAt)
|
||||
});
|
||||
|
||||
if (!item) {
|
||||
return jsonError('Watchlist item not found', 404);
|
||||
}
|
||||
|
||||
const statusChanged = existing.status !== item.status;
|
||||
if (statusChanged) {
|
||||
await createResearchJournalEntryRecord({
|
||||
userId: session.user.id,
|
||||
ticker: item.ticker,
|
||||
entryType: 'status_change',
|
||||
title: `Coverage status changed to ${formatLabel(item.status)}`,
|
||||
bodyMarkdown: `Coverage status changed from ${formatLabel(existing.status)} to ${formatLabel(item.status)}.`,
|
||||
metadata: {
|
||||
previousStatus: existing.status,
|
||||
nextStatus: item.status,
|
||||
priority: item.priority
|
||||
}
|
||||
});
|
||||
}
|
||||
|
||||
return Response.json({
|
||||
item,
|
||||
statusChangeJournalCreated: statusChanged
|
||||
});
|
||||
} catch (error) {
|
||||
return jsonError(asErrorMessage(error, 'Failed to update coverage item'));
|
||||
}
|
||||
})
|
||||
.delete('/watchlist/:id', async ({ params }) => {
|
||||
const { session, response } = await requireAuthenticatedSession();
|
||||
if (response) {
|
||||
@@ -377,13 +528,15 @@ export const app = new Elysia({ prefix: '/api' })
|
||||
|
||||
try {
|
||||
const currentPrice = asPositiveNumber(payload.currentPrice) ?? avgCost;
|
||||
const companyName = asOptionalString(payload.companyName) ?? undefined;
|
||||
|
||||
const { holding, created } = await upsertHoldingRecord({
|
||||
userId: session.user.id,
|
||||
ticker,
|
||||
shares,
|
||||
avgCost,
|
||||
currentPrice
|
||||
currentPrice,
|
||||
companyName
|
||||
});
|
||||
|
||||
const autoFilingSyncQueued = created
|
||||
@@ -399,7 +552,8 @@ export const app = new Elysia({ prefix: '/api' })
|
||||
ticker: t.String({ minLength: 1 }),
|
||||
shares: t.Number({ exclusiveMinimum: 0 }),
|
||||
avgCost: t.Number({ exclusiveMinimum: 0 }),
|
||||
currentPrice: t.Optional(t.Number({ exclusiveMinimum: 0 }))
|
||||
currentPrice: t.Optional(t.Number({ exclusiveMinimum: 0 })),
|
||||
companyName: t.Optional(t.String())
|
||||
})
|
||||
})
|
||||
.patch('/portfolio/holdings/:id', async ({ params, body }) => {
|
||||
@@ -420,7 +574,8 @@ export const app = new Elysia({ prefix: '/api' })
|
||||
id: numericId,
|
||||
shares: asPositiveNumber(payload.shares) ?? undefined,
|
||||
avgCost: asPositiveNumber(payload.avgCost) ?? undefined,
|
||||
currentPrice: asPositiveNumber(payload.currentPrice) ?? undefined
|
||||
currentPrice: asPositiveNumber(payload.currentPrice) ?? undefined,
|
||||
companyName: payload.companyName === undefined ? undefined : (asOptionalString(payload.companyName) ?? '')
|
||||
});
|
||||
|
||||
if (!updated) {
|
||||
@@ -435,7 +590,8 @@ export const app = new Elysia({ prefix: '/api' })
|
||||
body: t.Object({
|
||||
shares: t.Optional(t.Number({ exclusiveMinimum: 0 })),
|
||||
avgCost: t.Optional(t.Number({ exclusiveMinimum: 0 })),
|
||||
currentPrice: t.Optional(t.Number({ exclusiveMinimum: 0 }))
|
||||
currentPrice: t.Optional(t.Number({ exclusiveMinimum: 0 })),
|
||||
companyName: t.Optional(t.String())
|
||||
})
|
||||
})
|
||||
.delete('/portfolio/holdings/:id', async ({ params }) => {
|
||||
@@ -522,6 +678,127 @@ export const app = new Elysia({ prefix: '/api' })
|
||||
|
||||
return Response.json({ insight });
|
||||
})
|
||||
.get('/research/journal', async ({ query }) => {
|
||||
const { session, response } = await requireAuthenticatedSession();
|
||||
if (response) {
|
||||
return response;
|
||||
}
|
||||
|
||||
const ticker = typeof query.ticker === 'string' ? query.ticker.trim().toUpperCase() : '';
|
||||
if (!ticker) {
|
||||
return jsonError('ticker is required');
|
||||
}
|
||||
|
||||
const entries = await listResearchJournalEntries(session.user.id, ticker);
|
||||
return Response.json({ entries });
|
||||
}, {
|
||||
query: t.Object({
|
||||
ticker: t.String({ minLength: 1 })
|
||||
})
|
||||
})
|
||||
.post('/research/journal', async ({ body }) => {
|
||||
const { session, response } = await requireAuthenticatedSession();
|
||||
if (response) {
|
||||
return response;
|
||||
}
|
||||
|
||||
const payload = asRecord(body);
|
||||
const ticker = typeof payload.ticker === 'string' ? payload.ticker.trim().toUpperCase() : '';
|
||||
const entryType = asJournalEntryType(payload.entryType);
|
||||
const title = asOptionalString(payload.title);
|
||||
const bodyMarkdown = typeof payload.bodyMarkdown === 'string' ? payload.bodyMarkdown.trim() : '';
|
||||
const accessionNumber = asOptionalString(payload.accessionNumber);
|
||||
const metadata = asOptionalRecord(payload.metadata);
|
||||
|
||||
if (!ticker) {
|
||||
return jsonError('ticker is required');
|
||||
}
|
||||
|
||||
if (!entryType) {
|
||||
return jsonError('entryType is required');
|
||||
}
|
||||
|
||||
if (!bodyMarkdown) {
|
||||
return jsonError('bodyMarkdown is required');
|
||||
}
|
||||
|
||||
try {
|
||||
const entry = await createResearchJournalEntryRecord({
|
||||
userId: session.user.id,
|
||||
ticker,
|
||||
entryType,
|
||||
title,
|
||||
bodyMarkdown,
|
||||
accessionNumber,
|
||||
metadata
|
||||
});
|
||||
|
||||
await updateWatchlistReviewByTicker(session.user.id, ticker, entry.updated_at);
|
||||
|
||||
return Response.json({ entry });
|
||||
} catch (error) {
|
||||
return jsonError(asErrorMessage(error, 'Failed to create journal entry'));
|
||||
}
|
||||
})
|
||||
.patch('/research/journal/:id', async ({ params, body }) => {
|
||||
const { session, response } = await requireAuthenticatedSession();
|
||||
if (response) {
|
||||
return response;
|
||||
}
|
||||
|
||||
const numericId = Number(params.id);
|
||||
if (!Number.isInteger(numericId) || numericId <= 0) {
|
||||
return jsonError('Invalid journal id', 400);
|
||||
}
|
||||
|
||||
const payload = asRecord(body);
|
||||
const title = payload.title === undefined ? undefined : asOptionalString(payload.title);
|
||||
const bodyMarkdown = payload.bodyMarkdown === undefined
|
||||
? undefined
|
||||
: (typeof payload.bodyMarkdown === 'string' ? payload.bodyMarkdown : '');
|
||||
|
||||
try {
|
||||
const entry = await updateResearchJournalEntryRecord({
|
||||
userId: session.user.id,
|
||||
id: numericId,
|
||||
title,
|
||||
bodyMarkdown,
|
||||
metadata: payload.metadata === undefined ? undefined : asOptionalRecord(payload.metadata)
|
||||
});
|
||||
|
||||
if (!entry) {
|
||||
return jsonError('Journal entry not found', 404);
|
||||
}
|
||||
|
||||
await updateWatchlistReviewByTicker(session.user.id, entry.ticker, entry.updated_at);
|
||||
|
||||
return Response.json({ entry });
|
||||
} catch (error) {
|
||||
return jsonError(asErrorMessage(error, 'Failed to update journal entry'));
|
||||
}
|
||||
})
|
||||
.delete('/research/journal/:id', async ({ params }) => {
|
||||
const { session, response } = await requireAuthenticatedSession();
|
||||
if (response) {
|
||||
return response;
|
||||
}
|
||||
|
||||
const numericId = Number(params.id);
|
||||
if (!Number.isInteger(numericId) || numericId <= 0) {
|
||||
return jsonError('Invalid journal id', 400);
|
||||
}
|
||||
|
||||
const removed = await deleteResearchJournalEntryRecord(session.user.id, numericId);
|
||||
if (!removed) {
|
||||
return jsonError('Journal entry not found', 404);
|
||||
}
|
||||
|
||||
return Response.json({ success: true });
|
||||
}, {
|
||||
params: t.Object({
|
||||
id: t.String({ minLength: 1 })
|
||||
})
|
||||
})
|
||||
.get('/analysis/company', async ({ query }) => {
|
||||
const { session, response } = await requireAuthenticatedSession();
|
||||
if (response) {
|
||||
@@ -533,22 +810,21 @@ export const app = new Elysia({ prefix: '/api' })
|
||||
return jsonError('ticker is required');
|
||||
}
|
||||
|
||||
const [filings, holdings, watchlist, liveQuote, priceHistory] = await Promise.all([
|
||||
const [filings, holding, watchlistItem, liveQuote, priceHistory, journalPreview] = await Promise.all([
|
||||
listFilingsRecords({ ticker, limit: 40 }),
|
||||
listUserHoldings(session.user.id),
|
||||
listWatchlistItems(session.user.id),
|
||||
getHoldingByTicker(session.user.id, ticker),
|
||||
getWatchlistItemByTicker(session.user.id, ticker),
|
||||
getQuote(ticker),
|
||||
getPriceHistory(ticker)
|
||||
getPriceHistory(ticker),
|
||||
listResearchJournalEntries(session.user.id, ticker, 6)
|
||||
]);
|
||||
const redactedFilings = filings
|
||||
.map(redactInternalFilingAnalysisFields)
|
||||
.map(withFinancialMetricsPolicy);
|
||||
|
||||
const latestFiling = redactedFilings[0] ?? null;
|
||||
const holding = holdings.find((entry) => entry.ticker === ticker) ?? null;
|
||||
const watchlistItem = watchlist.find((entry) => entry.ticker === ticker) ?? null;
|
||||
|
||||
const companyName = latestFiling?.company_name
|
||||
?? holding?.company_name
|
||||
?? watchlistItem?.company_name
|
||||
?? ticker;
|
||||
|
||||
@@ -575,6 +851,30 @@ export const app = new Elysia({ prefix: '/api' })
|
||||
model: entry.analysis?.model ?? 'unknown',
|
||||
summary: entry.analysis?.text ?? entry.analysis?.legacyInsights ?? ''
|
||||
}));
|
||||
const latestMetricsFiling = redactedFilings.find((entry) => entry.metrics) ?? null;
|
||||
const referenceMetrics = latestMetricsFiling?.metrics ?? null;
|
||||
const keyMetrics = {
|
||||
referenceDate: latestMetricsFiling?.filing_date ?? latestFiling?.filing_date ?? null,
|
||||
revenue: referenceMetrics?.revenue ?? null,
|
||||
netIncome: referenceMetrics?.netIncome ?? null,
|
||||
totalAssets: referenceMetrics?.totalAssets ?? null,
|
||||
cash: referenceMetrics?.cash ?? null,
|
||||
debt: referenceMetrics?.debt ?? null,
|
||||
netMargin: referenceMetrics?.revenue && referenceMetrics.netIncome !== null
|
||||
? (referenceMetrics.netIncome / referenceMetrics.revenue) * 100
|
||||
: null
|
||||
};
|
||||
const latestFilingSummary = latestFiling
|
||||
? {
|
||||
accessionNumber: latestFiling.accession_number,
|
||||
filingDate: latestFiling.filing_date,
|
||||
filingType: latestFiling.filing_type,
|
||||
filingUrl: latestFiling.filing_url,
|
||||
submissionUrl: latestFiling.submission_url ?? null,
|
||||
summary: latestFiling.analysis?.text ?? latestFiling.analysis?.legacyInsights ?? null,
|
||||
hasAnalysis: Boolean(latestFiling.analysis?.text || latestFiling.analysis?.legacyInsights)
|
||||
}
|
||||
: null;
|
||||
|
||||
return Response.json({
|
||||
analysis: {
|
||||
@@ -591,7 +891,17 @@ export const app = new Elysia({ prefix: '/api' })
|
||||
priceHistory,
|
||||
financials,
|
||||
filings: redactedFilings.slice(0, 20),
|
||||
aiReports
|
||||
aiReports,
|
||||
coverage: watchlistItem
|
||||
? {
|
||||
...watchlistItem,
|
||||
latest_filing_date: latestFiling?.filing_date ?? watchlistItem.latest_filing_date ?? null
|
||||
}
|
||||
: null,
|
||||
journalPreview,
|
||||
recentAiReports: aiReports.slice(0, 5),
|
||||
latestFilingSummary,
|
||||
keyMetrics
|
||||
}
|
||||
});
|
||||
}, {
|
||||
|
||||
@@ -10,6 +10,7 @@ import {
|
||||
import { mkdtempSync, readFileSync, rmSync } from 'node:fs';
|
||||
import { tmpdir } from 'node:os';
|
||||
import { join } from 'node:path';
|
||||
import { Database } from 'bun:sqlite';
|
||||
import type { WorkflowRunStatus } from '@workflow/world';
|
||||
|
||||
const TEST_USER_ID = 'e2e-user';
|
||||
@@ -21,7 +22,7 @@ let runCounter = 0;
|
||||
let workflowBackendHealthy = true;
|
||||
|
||||
let tempDir: string | null = null;
|
||||
let sqliteClient: { exec: (query: string) => void; close: () => void } | null = null;
|
||||
let sqliteClient: Database | null = null;
|
||||
let app: { handle: (request: Request) => Promise<Response> } | null = null;
|
||||
|
||||
mock.module('workflow/api', () => ({
|
||||
@@ -87,7 +88,8 @@ function applySqlMigrations(client: { exec: (query: string) => void }) {
|
||||
'0002_workflow_task_projection_metadata.sql',
|
||||
'0003_task_stage_event_timeline.sql',
|
||||
'0004_watchlist_company_taxonomy.sql',
|
||||
'0005_financial_taxonomy_v3.sql'
|
||||
'0005_financial_taxonomy_v3.sql',
|
||||
'0006_coverage_journal_tracking.sql'
|
||||
];
|
||||
|
||||
for (const file of migrationFiles) {
|
||||
@@ -121,10 +123,72 @@ function ensureTestUser(client: { exec: (query: string) => void }) {
|
||||
function clearProjectionTables(client: { exec: (query: string) => void }) {
|
||||
client.exec('DELETE FROM task_stage_event;');
|
||||
client.exec('DELETE FROM task_run;');
|
||||
client.exec('DELETE FROM research_journal_entry;');
|
||||
client.exec('DELETE FROM holding;');
|
||||
client.exec('DELETE FROM watchlist_item;');
|
||||
client.exec('DELETE FROM portfolio_insight;');
|
||||
client.exec('DELETE FROM filing;');
|
||||
}
|
||||
|
||||
function seedFilingRecord(client: Database, input: {
|
||||
ticker: string;
|
||||
accessionNumber: string;
|
||||
filingType: '10-K' | '10-Q' | '8-K';
|
||||
filingDate: string;
|
||||
companyName: string;
|
||||
cik?: string;
|
||||
metrics?: {
|
||||
revenue: number | null;
|
||||
netIncome: number | null;
|
||||
totalAssets: number | null;
|
||||
cash: number | null;
|
||||
debt: number | null;
|
||||
} | null;
|
||||
analysisText?: string | null;
|
||||
}) {
|
||||
const now = new Date().toISOString();
|
||||
|
||||
client.query(`
|
||||
INSERT INTO filing (
|
||||
ticker,
|
||||
filing_type,
|
||||
filing_date,
|
||||
accession_number,
|
||||
cik,
|
||||
company_name,
|
||||
filing_url,
|
||||
submission_url,
|
||||
primary_document,
|
||||
metrics,
|
||||
analysis,
|
||||
created_at,
|
||||
updated_at
|
||||
) VALUES (?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?, ?);
|
||||
`).run(
|
||||
input.ticker,
|
||||
input.filingType,
|
||||
input.filingDate,
|
||||
input.accessionNumber,
|
||||
input.cik ?? '0000000000',
|
||||
input.companyName,
|
||||
`https://www.sec.gov/Archives/${input.accessionNumber}.htm`,
|
||||
`https://www.sec.gov/submissions/${input.accessionNumber}.json`,
|
||||
`${input.accessionNumber}.htm`,
|
||||
input.metrics ? JSON.stringify(input.metrics) : null,
|
||||
input.analysisText
|
||||
? JSON.stringify({
|
||||
provider: 'test',
|
||||
model: 'fixture',
|
||||
text: input.analysisText
|
||||
})
|
||||
: null,
|
||||
now,
|
||||
now
|
||||
);
|
||||
}
|
||||
|
||||
async function jsonRequest(
|
||||
method: 'GET' | 'POST' | 'PATCH',
|
||||
method: 'GET' | 'POST' | 'PATCH' | 'DELETE',
|
||||
path: string,
|
||||
body?: Record<string, unknown>
|
||||
) {
|
||||
@@ -154,8 +218,8 @@ if (process.env.RUN_TASK_WORKFLOW_E2E === '1') {
|
||||
|
||||
resetDbSingletons();
|
||||
|
||||
const dbModule = await import('@/lib/server/db');
|
||||
sqliteClient = dbModule.getSqliteClient();
|
||||
sqliteClient = new Database(join(tempDir, 'e2e.sqlite'), { create: true });
|
||||
sqliteClient.exec('PRAGMA foreign_keys = ON;');
|
||||
applySqlMigrations(sqliteClient);
|
||||
ensureTestUser(sqliteClient);
|
||||
|
||||
@@ -164,6 +228,7 @@ if (process.env.RUN_TASK_WORKFLOW_E2E === '1') {
|
||||
});
|
||||
|
||||
afterAll(() => {
|
||||
sqliteClient?.close();
|
||||
resetDbSingletons();
|
||||
|
||||
if (tempDir) {
|
||||
@@ -291,6 +356,199 @@ if (process.env.RUN_TASK_WORKFLOW_E2E === '1') {
|
||||
expect(task.payload.tags).toEqual(['semis', 'ai']);
|
||||
});
|
||||
|
||||
it('updates coverage status and archives while appending status-change journal history', async () => {
|
||||
const created = await jsonRequest('POST', '/api/watchlist', {
|
||||
ticker: 'amd',
|
||||
companyName: 'Advanced Micro Devices, Inc.',
|
||||
sector: 'Technology',
|
||||
status: 'backlog',
|
||||
priority: 'medium',
|
||||
tags: ['semis']
|
||||
});
|
||||
|
||||
expect(created.response.status).toBe(200);
|
||||
const createdItem = (created.json as {
|
||||
item: { id: number; ticker: string; status: string; priority: string };
|
||||
}).item;
|
||||
expect(createdItem.status).toBe('backlog');
|
||||
expect(createdItem.priority).toBe('medium');
|
||||
|
||||
const activated = await jsonRequest('PATCH', `/api/watchlist/${createdItem.id}`, {
|
||||
status: 'active',
|
||||
priority: 'high',
|
||||
lastReviewedAt: '2026-03-01T15:30:00.000Z'
|
||||
});
|
||||
expect(activated.response.status).toBe(200);
|
||||
const activatedBody = activated.json as {
|
||||
item: { status: string; priority: string; last_reviewed_at: string | null };
|
||||
statusChangeJournalCreated: boolean;
|
||||
};
|
||||
expect(activatedBody.item.status).toBe('active');
|
||||
expect(activatedBody.item.priority).toBe('high');
|
||||
expect(activatedBody.item.last_reviewed_at).toBe('2026-03-01T15:30:00.000Z');
|
||||
expect(activatedBody.statusChangeJournalCreated).toBe(true);
|
||||
|
||||
const archived = await jsonRequest('PATCH', `/api/watchlist/${createdItem.id}`, {
|
||||
status: 'archive'
|
||||
});
|
||||
expect(archived.response.status).toBe(200);
|
||||
expect((archived.json as {
|
||||
item: { status: string };
|
||||
statusChangeJournalCreated: boolean;
|
||||
}).item.status).toBe('archive');
|
||||
|
||||
const journal = await jsonRequest('GET', '/api/research/journal?ticker=AMD');
|
||||
expect(journal.response.status).toBe(200);
|
||||
const entries = (journal.json as {
|
||||
entries: Array<{
|
||||
entry_type: string;
|
||||
title: string | null;
|
||||
}>;
|
||||
}).entries;
|
||||
expect(entries.length).toBe(2);
|
||||
expect(entries.every((entry) => entry.entry_type === 'status_change')).toBe(true);
|
||||
expect(entries[0]?.title).toContain('Archive');
|
||||
|
||||
const coverage = await jsonRequest('GET', '/api/watchlist');
|
||||
const saved = (coverage.json as {
|
||||
items: Array<{
|
||||
ticker: string;
|
||||
status: string;
|
||||
priority: string;
|
||||
}>;
|
||||
}).items.find((item) => item.ticker === 'AMD');
|
||||
expect(saved?.status).toBe('archive');
|
||||
expect(saved?.priority).toBe('high');
|
||||
});
|
||||
|
||||
it('supports journal CRUD and includes coverage, preview, reports, and key metrics in analysis payload', async () => {
|
||||
if (!sqliteClient) {
|
||||
throw new Error('sqlite client not initialized');
|
||||
}
|
||||
|
||||
seedFilingRecord(sqliteClient, {
|
||||
ticker: 'NFLX',
|
||||
accessionNumber: '0000000000-26-000777',
|
||||
filingType: '10-K',
|
||||
filingDate: '2026-02-15',
|
||||
companyName: 'Netflix, Inc.',
|
||||
metrics: {
|
||||
revenue: 41000000000,
|
||||
netIncome: 8600000000,
|
||||
totalAssets: 52000000000,
|
||||
cash: 7800000000,
|
||||
debt: 14000000000
|
||||
},
|
||||
analysisText: 'Subscriber growth reaccelerated with improved operating leverage.'
|
||||
});
|
||||
|
||||
await jsonRequest('POST', '/api/watchlist', {
|
||||
ticker: 'nflx',
|
||||
companyName: 'Netflix, Inc.',
|
||||
sector: 'Communication Services',
|
||||
status: 'active',
|
||||
priority: 'high',
|
||||
tags: ['streaming', 'quality']
|
||||
});
|
||||
|
||||
await jsonRequest('POST', '/api/portfolio/holdings', {
|
||||
ticker: 'NFLX',
|
||||
companyName: 'Netflix, Inc.',
|
||||
shares: 12,
|
||||
avgCost: 440,
|
||||
currentPrice: 455
|
||||
});
|
||||
|
||||
const createdEntry = await jsonRequest('POST', '/api/research/journal', {
|
||||
ticker: 'NFLX',
|
||||
entryType: 'note',
|
||||
title: 'Thesis refresh',
|
||||
bodyMarkdown: 'Monitor ad-tier margin progression and content amortization.'
|
||||
});
|
||||
expect(createdEntry.response.status).toBe(200);
|
||||
const entryId = (createdEntry.json as {
|
||||
entry: { id: number };
|
||||
}).entry.id;
|
||||
|
||||
const analysis = await jsonRequest('GET', '/api/analysis/company?ticker=NFLX');
|
||||
expect(analysis.response.status).toBe(200);
|
||||
const payload = (analysis.json as {
|
||||
analysis: {
|
||||
coverage: { status: string; priority: string; tags: string[] } | null;
|
||||
journalPreview: Array<{ title: string | null; body_markdown: string }>;
|
||||
recentAiReports: Array<{ accessionNumber: string; summary: string }>;
|
||||
latestFilingSummary: { accessionNumber: string; summary: string | null } | null;
|
||||
keyMetrics: { revenue: number | null; netMargin: number | null };
|
||||
position: { company_name: string | null } | null;
|
||||
};
|
||||
}).analysis;
|
||||
|
||||
expect(payload.coverage?.status).toBe('active');
|
||||
expect(payload.coverage?.priority).toBe('high');
|
||||
expect(payload.coverage?.tags).toEqual(['streaming', 'quality']);
|
||||
expect(payload.journalPreview.length).toBe(1);
|
||||
expect(payload.journalPreview[0]?.title).toBe('Thesis refresh');
|
||||
expect(payload.recentAiReports.length).toBe(1);
|
||||
expect(payload.latestFilingSummary?.accessionNumber).toBe('0000000000-26-000777');
|
||||
expect(payload.latestFilingSummary?.summary).toContain('Subscriber growth reaccelerated');
|
||||
expect(payload.keyMetrics.revenue).toBe(41000000000);
|
||||
expect(payload.keyMetrics.netMargin).not.toBeNull();
|
||||
expect(payload.position?.company_name).toBe('Netflix, Inc.');
|
||||
|
||||
const updatedEntry = await jsonRequest('PATCH', `/api/research/journal/${entryId}`, {
|
||||
title: 'Thesis refresh v2',
|
||||
bodyMarkdown: 'Monitor ad-tier margin progression, churn, and cash content spend.'
|
||||
});
|
||||
expect(updatedEntry.response.status).toBe(200);
|
||||
expect((updatedEntry.json as {
|
||||
entry: { title: string | null; body_markdown: string };
|
||||
}).entry.title).toBe('Thesis refresh v2');
|
||||
|
||||
const journalAfterUpdate = await jsonRequest('GET', '/api/research/journal?ticker=NFLX');
|
||||
expect(journalAfterUpdate.response.status).toBe(200);
|
||||
expect((journalAfterUpdate.json as {
|
||||
entries: Array<{ title: string | null; body_markdown: string }>;
|
||||
}).entries[0]?.body_markdown).toContain('cash content spend');
|
||||
|
||||
const removed = await jsonRequest('DELETE', `/api/research/journal/${entryId}`);
|
||||
expect(removed.response.status).toBe(200);
|
||||
|
||||
const journalAfterDelete = await jsonRequest('GET', '/api/research/journal?ticker=NFLX');
|
||||
expect((journalAfterDelete.json as {
|
||||
entries: unknown[];
|
||||
}).entries).toHaveLength(0);
|
||||
});
|
||||
|
||||
it('persists nullable holding company names and allows later enrichment', async () => {
|
||||
const created = await jsonRequest('POST', '/api/portfolio/holdings', {
|
||||
ticker: 'ORCL',
|
||||
shares: 5,
|
||||
avgCost: 100,
|
||||
currentPrice: 110
|
||||
});
|
||||
expect(created.response.status).toBe(200);
|
||||
|
||||
const holdings = await jsonRequest('GET', '/api/portfolio/holdings');
|
||||
expect(holdings.response.status).toBe(200);
|
||||
const saved = (holdings.json as {
|
||||
holdings: Array<{
|
||||
id: number;
|
||||
ticker: string;
|
||||
company_name: string | null;
|
||||
}>;
|
||||
}).holdings.find((entry) => entry.ticker === 'ORCL');
|
||||
|
||||
expect(saved?.company_name).toBeNull();
|
||||
|
||||
const updated = await jsonRequest('PATCH', `/api/portfolio/holdings/${saved?.id}`, {
|
||||
companyName: 'Oracle Corporation'
|
||||
});
|
||||
expect(updated.response.status).toBe(200);
|
||||
expect((updated.json as {
|
||||
holding: { company_name: string | null };
|
||||
}).holding.company_name).toBe('Oracle Corporation');
|
||||
});
|
||||
|
||||
it('updates notification read and silenced state via patch endpoint', async () => {
|
||||
const created = await jsonRequest('POST', '/api/filings/0000000000-26-000010/analyze');
|
||||
const taskId = (created.json as { task: { id: string } }).task.id;
|
||||
|
||||
@@ -20,14 +20,23 @@ describe('sqlite schema compatibility bootstrap', () => {
|
||||
applyMigration(client, '0003_task_stage_event_timeline.sql');
|
||||
|
||||
expect(__dbInternals.hasColumn(client, 'watchlist_item', 'category')).toBe(false);
|
||||
expect(__dbInternals.hasColumn(client, 'watchlist_item', 'status')).toBe(false);
|
||||
expect(__dbInternals.hasColumn(client, 'holding', 'company_name')).toBe(false);
|
||||
expect(__dbInternals.hasTable(client, 'filing_taxonomy_snapshot')).toBe(false);
|
||||
expect(__dbInternals.hasTable(client, 'research_journal_entry')).toBe(false);
|
||||
|
||||
__dbInternals.ensureLocalSqliteSchema(client);
|
||||
|
||||
expect(__dbInternals.hasColumn(client, 'watchlist_item', 'category')).toBe(true);
|
||||
expect(__dbInternals.hasColumn(client, 'watchlist_item', 'tags')).toBe(true);
|
||||
expect(__dbInternals.hasColumn(client, 'watchlist_item', 'status')).toBe(true);
|
||||
expect(__dbInternals.hasColumn(client, 'watchlist_item', 'priority')).toBe(true);
|
||||
expect(__dbInternals.hasColumn(client, 'watchlist_item', 'updated_at')).toBe(true);
|
||||
expect(__dbInternals.hasColumn(client, 'watchlist_item', 'last_reviewed_at')).toBe(true);
|
||||
expect(__dbInternals.hasColumn(client, 'holding', 'company_name')).toBe(true);
|
||||
expect(__dbInternals.hasTable(client, 'filing_taxonomy_snapshot')).toBe(true);
|
||||
expect(__dbInternals.hasTable(client, 'filing_taxonomy_fact')).toBe(true);
|
||||
expect(__dbInternals.hasTable(client, 'research_journal_entry')).toBe(true);
|
||||
|
||||
client.close();
|
||||
});
|
||||
|
||||
@@ -78,7 +78,11 @@ function ensureLocalSqliteSchema(client: Database) {
|
||||
if (hasTable(client, 'watchlist_item')) {
|
||||
const missingWatchlistColumns: Array<{ name: string; sql: string }> = [
|
||||
{ name: 'category', sql: 'ALTER TABLE `watchlist_item` ADD `category` text;' },
|
||||
{ name: 'tags', sql: 'ALTER TABLE `watchlist_item` ADD `tags` text;' }
|
||||
{ name: 'tags', sql: 'ALTER TABLE `watchlist_item` ADD `tags` text;' },
|
||||
{ name: 'status', sql: "ALTER TABLE `watchlist_item` ADD `status` text NOT NULL DEFAULT 'backlog';" },
|
||||
{ name: 'priority', sql: "ALTER TABLE `watchlist_item` ADD `priority` text NOT NULL DEFAULT 'medium';" },
|
||||
{ name: 'updated_at', sql: "ALTER TABLE `watchlist_item` ADD `updated_at` text NOT NULL DEFAULT '';" },
|
||||
{ name: 'last_reviewed_at', sql: 'ALTER TABLE `watchlist_item` ADD `last_reviewed_at` text;' }
|
||||
];
|
||||
|
||||
for (const column of missingWatchlistColumns) {
|
||||
@@ -86,11 +90,54 @@ function ensureLocalSqliteSchema(client: Database) {
|
||||
client.exec(column.sql);
|
||||
}
|
||||
}
|
||||
|
||||
client.exec(`
|
||||
UPDATE \`watchlist_item\`
|
||||
SET
|
||||
\`status\` = CASE
|
||||
WHEN \`status\` IS NULL OR TRIM(\`status\`) = '' THEN 'backlog'
|
||||
ELSE \`status\`
|
||||
END,
|
||||
\`priority\` = CASE
|
||||
WHEN \`priority\` IS NULL OR TRIM(\`priority\`) = '' THEN 'medium'
|
||||
ELSE \`priority\`
|
||||
END,
|
||||
\`updated_at\` = CASE
|
||||
WHEN \`updated_at\` IS NULL OR TRIM(\`updated_at\`) = '' THEN COALESCE(NULLIF(\`created_at\`, ''), CURRENT_TIMESTAMP)
|
||||
ELSE \`updated_at\`
|
||||
END;
|
||||
`);
|
||||
|
||||
client.exec('CREATE INDEX IF NOT EXISTS `watchlist_user_updated_idx` ON `watchlist_item` (`user_id`, `updated_at`);');
|
||||
}
|
||||
|
||||
if (hasTable(client, 'holding') && !hasColumn(client, 'holding', 'company_name')) {
|
||||
client.exec('ALTER TABLE `holding` ADD `company_name` text;');
|
||||
}
|
||||
|
||||
if (!hasTable(client, 'filing_taxonomy_snapshot')) {
|
||||
applySqlFile(client, '0005_financial_taxonomy_v3.sql');
|
||||
}
|
||||
|
||||
if (!hasTable(client, 'research_journal_entry')) {
|
||||
client.exec(`
|
||||
CREATE TABLE IF NOT EXISTS \`research_journal_entry\` (
|
||||
\`id\` integer PRIMARY KEY AUTOINCREMENT NOT NULL,
|
||||
\`user_id\` text NOT NULL,
|
||||
\`ticker\` text NOT NULL,
|
||||
\`accession_number\` text,
|
||||
\`entry_type\` text NOT NULL,
|
||||
\`title\` text,
|
||||
\`body_markdown\` text NOT NULL,
|
||||
\`metadata\` text,
|
||||
\`created_at\` text NOT NULL,
|
||||
\`updated_at\` text NOT NULL,
|
||||
FOREIGN KEY (\`user_id\`) REFERENCES \`user\`(\`id\`) ON UPDATE no action ON DELETE cascade
|
||||
);
|
||||
`);
|
||||
client.exec('CREATE INDEX IF NOT EXISTS `research_journal_ticker_idx` ON `research_journal_entry` (`user_id`, `ticker`, `created_at`);');
|
||||
client.exec('CREATE INDEX IF NOT EXISTS `research_journal_accession_idx` ON `research_journal_entry` (`user_id`, `accession_number`);');
|
||||
}
|
||||
}
|
||||
|
||||
export function getSqliteClient() {
|
||||
|
||||
@@ -27,6 +27,9 @@ type TaxonomyAssetType =
|
||||
|
||||
type TaxonomyParseStatus = 'ready' | 'partial' | 'failed';
|
||||
type TaxonomyMetricValidationStatus = 'not_run' | 'matched' | 'mismatch' | 'error';
|
||||
type CoverageStatus = 'backlog' | 'active' | 'watch' | 'archive';
|
||||
type CoveragePriority = 'low' | 'medium' | 'high';
|
||||
type ResearchJournalEntryType = 'note' | 'filing_note' | 'status_change';
|
||||
|
||||
type FilingAnalysis = {
|
||||
provider?: string;
|
||||
@@ -269,16 +272,22 @@ export const watchlistItem = sqliteTable('watchlist_item', {
|
||||
sector: text('sector'),
|
||||
category: text('category'),
|
||||
tags: text('tags', { mode: 'json' }).$type<string[]>(),
|
||||
created_at: text('created_at').notNull()
|
||||
status: text('status').$type<CoverageStatus>().notNull().default('backlog'),
|
||||
priority: text('priority').$type<CoveragePriority>().notNull().default('medium'),
|
||||
created_at: text('created_at').notNull(),
|
||||
updated_at: text('updated_at').notNull(),
|
||||
last_reviewed_at: text('last_reviewed_at')
|
||||
}, (table) => ({
|
||||
watchlistUserTickerUnique: uniqueIndex('watchlist_user_ticker_uidx').on(table.user_id, table.ticker),
|
||||
watchlistUserCreatedIndex: index('watchlist_user_created_idx').on(table.user_id, table.created_at)
|
||||
watchlistUserCreatedIndex: index('watchlist_user_created_idx').on(table.user_id, table.created_at),
|
||||
watchlistUserUpdatedIndex: index('watchlist_user_updated_idx').on(table.user_id, table.updated_at)
|
||||
}));
|
||||
|
||||
export const holding = sqliteTable('holding', {
|
||||
id: integer('id').primaryKey({ autoIncrement: true }),
|
||||
user_id: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }),
|
||||
ticker: text('ticker').notNull(),
|
||||
company_name: text('company_name'),
|
||||
shares: numeric('shares').notNull(),
|
||||
avg_cost: numeric('avg_cost').notNull(),
|
||||
current_price: numeric('current_price'),
|
||||
@@ -520,6 +529,22 @@ export const portfolioInsight = sqliteTable('portfolio_insight', {
|
||||
insightUserCreatedIndex: index('insight_user_created_idx').on(table.user_id, table.created_at)
|
||||
}));
|
||||
|
||||
export const researchJournalEntry = sqliteTable('research_journal_entry', {
|
||||
id: integer('id').primaryKey({ autoIncrement: true }),
|
||||
user_id: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }),
|
||||
ticker: text('ticker').notNull(),
|
||||
accession_number: text('accession_number'),
|
||||
entry_type: text('entry_type').$type<ResearchJournalEntryType>().notNull(),
|
||||
title: text('title'),
|
||||
body_markdown: text('body_markdown').notNull(),
|
||||
metadata: text('metadata', { mode: 'json' }).$type<Record<string, unknown> | null>(),
|
||||
created_at: text('created_at').notNull(),
|
||||
updated_at: text('updated_at').notNull()
|
||||
}, (table) => ({
|
||||
researchJournalTickerIndex: index('research_journal_ticker_idx').on(table.user_id, table.ticker, table.created_at),
|
||||
researchJournalAccessionIndex: index('research_journal_accession_idx').on(table.user_id, table.accession_number)
|
||||
}));
|
||||
|
||||
export const authSchema = {
|
||||
user,
|
||||
session,
|
||||
@@ -543,7 +568,8 @@ export const appSchema = {
|
||||
filingLink,
|
||||
taskRun,
|
||||
taskStageEvent,
|
||||
portfolioInsight
|
||||
portfolioInsight,
|
||||
researchJournalEntry
|
||||
};
|
||||
|
||||
export const schema = {
|
||||
|
||||
@@ -707,6 +707,85 @@ function latestMetrics(snapshots: FilingTaxonomySnapshotRecord[]) {
|
||||
};
|
||||
}
|
||||
|
||||
function rowValue(row: { values: Record<string, number | null> }, periodId: string) {
|
||||
return periodId in row.values ? row.values[periodId] : null;
|
||||
}
|
||||
|
||||
function findStandardizedRow(rows: StandardizedStatementRow[], key: string) {
|
||||
return rows.find((row) => row.key === key) ?? null;
|
||||
}
|
||||
|
||||
function findFaithfulRowByLocalNames(rows: TaxonomyStatementRow[], localNames: string[]) {
|
||||
const normalizedNames = localNames.map((name) => name.toLowerCase());
|
||||
const exact = rows.find((row) => normalizedNames.includes(row.localName.toLowerCase()));
|
||||
if (exact) {
|
||||
return exact;
|
||||
}
|
||||
|
||||
return rows.find((row) => {
|
||||
const haystack = `${row.key} ${row.label} ${row.localName} ${row.qname}`.toLowerCase();
|
||||
return normalizedNames.some((name) => haystack.includes(name));
|
||||
}) ?? null;
|
||||
}
|
||||
|
||||
function asOverviewLabel(period: FinancialStatementPeriod) {
|
||||
const source = period.periodEnd ?? period.filingDate;
|
||||
const parsed = Date.parse(source);
|
||||
if (!Number.isFinite(parsed)) {
|
||||
return source;
|
||||
}
|
||||
|
||||
return new Intl.DateTimeFormat('en-US', {
|
||||
month: 'short',
|
||||
year: 'numeric'
|
||||
}).format(new Date(parsed));
|
||||
}
|
||||
|
||||
function buildOverviewMetrics(input: {
|
||||
periods: FinancialStatementPeriod[];
|
||||
faithfulRows: TaxonomyStatementRow[];
|
||||
standardizedRows: StandardizedStatementRow[];
|
||||
}): CompanyFinancialStatementsResponse['overviewMetrics'] {
|
||||
const periods = [...input.periods].sort(periodSorter);
|
||||
const revenueRow = findStandardizedRow(input.standardizedRows, 'revenue')
|
||||
?? findFaithfulRowByLocalNames(input.faithfulRows, ['RevenueFromContractWithCustomerExcludingAssessedTax', 'Revenues', 'SalesRevenueNet', 'Revenue']);
|
||||
const netIncomeRow = findStandardizedRow(input.standardizedRows, 'net-income')
|
||||
?? findFaithfulRowByLocalNames(input.faithfulRows, ['NetIncomeLoss', 'ProfitLoss']);
|
||||
const assetsRow = findStandardizedRow(input.standardizedRows, 'total-assets')
|
||||
?? findFaithfulRowByLocalNames(input.faithfulRows, ['Assets']);
|
||||
const cashRow = findStandardizedRow(input.standardizedRows, 'cash-and-equivalents')
|
||||
?? findFaithfulRowByLocalNames(input.faithfulRows, ['CashAndCashEquivalentsAtCarryingValue', 'CashAndShortTermInvestments', 'Cash']);
|
||||
const debtRow = findStandardizedRow(input.standardizedRows, 'total-debt')
|
||||
?? findFaithfulRowByLocalNames(input.faithfulRows, ['LongTermDebt', 'Debt', 'LongTermDebtNoncurrent']);
|
||||
|
||||
const series = periods.map((period) => ({
|
||||
periodId: period.id,
|
||||
filingDate: period.filingDate,
|
||||
periodEnd: period.periodEnd,
|
||||
label: asOverviewLabel(period),
|
||||
revenue: revenueRow ? rowValue(revenueRow, period.id) : null,
|
||||
netIncome: netIncomeRow ? rowValue(netIncomeRow, period.id) : null,
|
||||
totalAssets: assetsRow ? rowValue(assetsRow, period.id) : null,
|
||||
cash: cashRow ? rowValue(cashRow, period.id) : null,
|
||||
debt: debtRow ? rowValue(debtRow, period.id) : null
|
||||
}));
|
||||
|
||||
const latest = series[series.length - 1] ?? null;
|
||||
|
||||
return {
|
||||
referencePeriodId: latest?.periodId ?? null,
|
||||
referenceDate: latest?.filingDate ?? null,
|
||||
latest: {
|
||||
revenue: latest?.revenue ?? null,
|
||||
netIncome: latest?.netIncome ?? null,
|
||||
totalAssets: latest?.totalAssets ?? null,
|
||||
cash: latest?.cash ?? null,
|
||||
debt: latest?.debt ?? null
|
||||
},
|
||||
series
|
||||
};
|
||||
}
|
||||
|
||||
export function defaultFinancialSyncLimit(window: FinancialHistoryWindow) {
|
||||
return window === 'all' ? 120 : 60;
|
||||
}
|
||||
@@ -806,6 +885,11 @@ export async function getCompanyFinancialTaxonomy(input: GetCompanyFinancialTaxo
|
||||
pendingFilings: Math.max(0, financialFilings.length - statuses.ready - statuses.partial - statuses.failed),
|
||||
queuedSync: input.queuedSync
|
||||
},
|
||||
overviewMetrics: buildOverviewMetrics({
|
||||
periods,
|
||||
faithfulRows,
|
||||
standardizedRows
|
||||
}),
|
||||
metrics,
|
||||
dimensionBreakdown
|
||||
};
|
||||
|
||||
@@ -1,4 +1,4 @@
|
||||
import { desc, eq } from 'drizzle-orm';
|
||||
import { desc, eq, inArray, max } from 'drizzle-orm';
|
||||
import type { Filing } from '@/lib/types';
|
||||
import { db } from '@/lib/server/db';
|
||||
import { filing, filingLink } from '@/lib/server/db/schema';
|
||||
@@ -87,6 +87,35 @@ export async function getFilingByAccession(accessionNumber: string) {
|
||||
return row ? toFiling(row) : null;
|
||||
}
|
||||
|
||||
export async function listLatestFilingDatesByTickers(tickers: string[]) {
|
||||
const normalizedTickers = [...new Set(
|
||||
tickers
|
||||
.map((ticker) => ticker.trim().toUpperCase())
|
||||
.filter((ticker) => ticker.length > 0)
|
||||
)];
|
||||
|
||||
if (normalizedTickers.length === 0) {
|
||||
return new Map<string, string>();
|
||||
}
|
||||
|
||||
const rows = await db
|
||||
.select({
|
||||
ticker: filing.ticker,
|
||||
latest_filing_date: max(filing.filing_date)
|
||||
})
|
||||
.from(filing)
|
||||
.where(inArray(filing.ticker, normalizedTickers))
|
||||
.groupBy(filing.ticker);
|
||||
|
||||
return new Map(
|
||||
rows
|
||||
.filter((row): row is { ticker: string; latest_filing_date: string } => {
|
||||
return typeof row.ticker === 'string' && typeof row.latest_filing_date === 'string';
|
||||
})
|
||||
.map((row) => [row.ticker, row.latest_filing_date])
|
||||
);
|
||||
}
|
||||
|
||||
export async function upsertFilingsRecords(items: UpsertFilingInput[]) {
|
||||
let inserted = 0;
|
||||
let updated = 0;
|
||||
|
||||
@@ -1,8 +1,8 @@
|
||||
import { and, eq } from 'drizzle-orm';
|
||||
import { and, desc, eq } from 'drizzle-orm';
|
||||
import type { Holding } from '@/lib/types';
|
||||
import { recalculateHolding } from '@/lib/server/portfolio';
|
||||
import { db } from '@/lib/server/db';
|
||||
import { holding } from '@/lib/server/db/schema';
|
||||
import { filing, holding, watchlistItem } from '@/lib/server/db/schema';
|
||||
|
||||
type HoldingRow = typeof holding.$inferSelect;
|
||||
|
||||
@@ -11,6 +11,7 @@ function toHolding(row: HoldingRow): Holding {
|
||||
id: row.id,
|
||||
user_id: row.user_id,
|
||||
ticker: row.ticker,
|
||||
company_name: row.company_name ?? null,
|
||||
shares: row.shares,
|
||||
avg_cost: row.avg_cost,
|
||||
current_price: row.current_price,
|
||||
@@ -36,6 +37,41 @@ function normalizeHoldingInput(input: { ticker: string; shares: number; avgCost:
|
||||
};
|
||||
}
|
||||
|
||||
async function resolveHoldingCompanyName(input: {
|
||||
userId: string;
|
||||
ticker: string;
|
||||
companyName?: string;
|
||||
existingCompanyName?: string | null;
|
||||
}) {
|
||||
const explicitCompanyName = input.companyName?.trim();
|
||||
if (explicitCompanyName) {
|
||||
return explicitCompanyName;
|
||||
}
|
||||
|
||||
if (input.existingCompanyName?.trim()) {
|
||||
return input.existingCompanyName.trim();
|
||||
}
|
||||
|
||||
const [watchlistMatch] = await db
|
||||
.select({ company_name: watchlistItem.company_name })
|
||||
.from(watchlistItem)
|
||||
.where(and(eq(watchlistItem.user_id, input.userId), eq(watchlistItem.ticker, input.ticker)))
|
||||
.limit(1);
|
||||
|
||||
if (watchlistMatch?.company_name?.trim()) {
|
||||
return watchlistMatch.company_name.trim();
|
||||
}
|
||||
|
||||
const [filingMatch] = await db
|
||||
.select({ company_name: filing.company_name })
|
||||
.from(filing)
|
||||
.where(eq(filing.ticker, input.ticker))
|
||||
.orderBy(desc(filing.filing_date), desc(filing.updated_at))
|
||||
.limit(1);
|
||||
|
||||
return filingMatch?.company_name?.trim() ? filingMatch.company_name.trim() : null;
|
||||
}
|
||||
|
||||
export async function listUserHoldings(userId: string) {
|
||||
const rows = await db
|
||||
.select()
|
||||
@@ -45,12 +81,28 @@ export async function listUserHoldings(userId: string) {
|
||||
return sortByMarketValueDesc(rows.map(toHolding));
|
||||
}
|
||||
|
||||
export async function getHoldingByTicker(userId: string, ticker: string) {
|
||||
const normalizedTicker = ticker.trim().toUpperCase();
|
||||
if (!normalizedTicker) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const [row] = await db
|
||||
.select()
|
||||
.from(holding)
|
||||
.where(and(eq(holding.user_id, userId), eq(holding.ticker, normalizedTicker)))
|
||||
.limit(1);
|
||||
|
||||
return row ? toHolding(row) : null;
|
||||
}
|
||||
|
||||
export async function upsertHoldingRecord(input: {
|
||||
userId: string;
|
||||
ticker: string;
|
||||
shares: number;
|
||||
avgCost: number;
|
||||
currentPrice?: number;
|
||||
companyName?: string;
|
||||
}) {
|
||||
const ticker = input.ticker.trim().toUpperCase();
|
||||
const now = new Date().toISOString();
|
||||
@@ -64,6 +116,12 @@ export async function upsertHoldingRecord(input: {
|
||||
const currentPrice = Number.isFinite(input.currentPrice)
|
||||
? Number(input.currentPrice)
|
||||
: input.avgCost;
|
||||
const companyName = await resolveHoldingCompanyName({
|
||||
userId: input.userId,
|
||||
ticker,
|
||||
companyName: input.companyName,
|
||||
existingCompanyName: existing?.company_name ?? null
|
||||
});
|
||||
|
||||
if (existing) {
|
||||
const normalized = normalizeHoldingInput({
|
||||
@@ -84,6 +142,7 @@ export async function upsertHoldingRecord(input: {
|
||||
.update(holding)
|
||||
.set({
|
||||
ticker: next.ticker,
|
||||
company_name: companyName,
|
||||
shares: next.shares,
|
||||
avg_cost: next.avg_cost,
|
||||
current_price: next.current_price,
|
||||
@@ -113,6 +172,7 @@ export async function upsertHoldingRecord(input: {
|
||||
id: 0,
|
||||
user_id: input.userId,
|
||||
ticker: normalized.ticker,
|
||||
company_name: companyName,
|
||||
shares: normalized.shares,
|
||||
avg_cost: normalized.avg_cost,
|
||||
current_price: normalized.current_price,
|
||||
@@ -131,6 +191,7 @@ export async function upsertHoldingRecord(input: {
|
||||
.values({
|
||||
user_id: created.user_id,
|
||||
ticker: created.ticker,
|
||||
company_name: created.company_name,
|
||||
shares: created.shares,
|
||||
avg_cost: created.avg_cost,
|
||||
current_price: created.current_price,
|
||||
@@ -155,6 +216,7 @@ export async function updateHoldingByIdRecord(input: {
|
||||
shares?: number;
|
||||
avgCost?: number;
|
||||
currentPrice?: number;
|
||||
companyName?: string;
|
||||
}) {
|
||||
const [existing] = await db
|
||||
.select()
|
||||
@@ -176,9 +238,16 @@ export async function updateHoldingByIdRecord(input: {
|
||||
const currentPrice = Number.isFinite(input.currentPrice)
|
||||
? Number(input.currentPrice)
|
||||
: Number(current.current_price ?? current.avg_cost);
|
||||
const companyName = await resolveHoldingCompanyName({
|
||||
userId: input.userId,
|
||||
ticker: current.ticker,
|
||||
companyName: input.companyName,
|
||||
existingCompanyName: current.company_name
|
||||
});
|
||||
|
||||
const next = recalculateHolding({
|
||||
...current,
|
||||
company_name: companyName,
|
||||
shares: shares.toFixed(6),
|
||||
avg_cost: avgCost.toFixed(6),
|
||||
current_price: currentPrice.toFixed(6),
|
||||
@@ -189,6 +258,7 @@ export async function updateHoldingByIdRecord(input: {
|
||||
const [updated] = await db
|
||||
.update(holding)
|
||||
.set({
|
||||
company_name: companyName,
|
||||
shares: next.shares,
|
||||
avg_cost: next.avg_cost,
|
||||
current_price: next.current_price,
|
||||
|
||||
148
lib/server/repos/research-journal.ts
Normal file
148
lib/server/repos/research-journal.ts
Normal file
@@ -0,0 +1,148 @@
|
||||
import { and, desc, eq } from 'drizzle-orm';
|
||||
import type {
|
||||
ResearchJournalEntry,
|
||||
ResearchJournalEntryType
|
||||
} from '@/lib/types';
|
||||
import { db } from '@/lib/server/db';
|
||||
import { researchJournalEntry } from '@/lib/server/db/schema';
|
||||
|
||||
type ResearchJournalRow = typeof researchJournalEntry.$inferSelect;
|
||||
|
||||
function normalizeTicker(ticker: string) {
|
||||
return ticker.trim().toUpperCase();
|
||||
}
|
||||
|
||||
function normalizeTitle(title?: string | null) {
|
||||
const normalized = title?.trim();
|
||||
return normalized ? normalized : null;
|
||||
}
|
||||
|
||||
function normalizeAccessionNumber(accessionNumber?: string | null) {
|
||||
const normalized = accessionNumber?.trim();
|
||||
return normalized ? normalized : null;
|
||||
}
|
||||
|
||||
function normalizeMetadata(metadata?: Record<string, unknown> | null) {
|
||||
if (!metadata || typeof metadata !== 'object' || Array.isArray(metadata)) {
|
||||
return null;
|
||||
}
|
||||
|
||||
return metadata;
|
||||
}
|
||||
|
||||
function toResearchJournalEntry(row: ResearchJournalRow): ResearchJournalEntry {
|
||||
return {
|
||||
id: row.id,
|
||||
user_id: row.user_id,
|
||||
ticker: row.ticker,
|
||||
accession_number: row.accession_number ?? null,
|
||||
entry_type: row.entry_type,
|
||||
title: row.title ?? null,
|
||||
body_markdown: row.body_markdown,
|
||||
metadata: row.metadata ?? null,
|
||||
created_at: row.created_at,
|
||||
updated_at: row.updated_at
|
||||
};
|
||||
}
|
||||
|
||||
export async function listResearchJournalEntries(userId: string, ticker: string, limit = 100) {
|
||||
const normalizedTicker = normalizeTicker(ticker);
|
||||
if (!normalizedTicker) {
|
||||
return [];
|
||||
}
|
||||
|
||||
const safeLimit = Math.min(Math.max(Math.trunc(limit), 1), 250);
|
||||
const rows = await db
|
||||
.select()
|
||||
.from(researchJournalEntry)
|
||||
.where(and(eq(researchJournalEntry.user_id, userId), eq(researchJournalEntry.ticker, normalizedTicker)))
|
||||
.orderBy(desc(researchJournalEntry.created_at), desc(researchJournalEntry.id))
|
||||
.limit(safeLimit);
|
||||
|
||||
return rows.map(toResearchJournalEntry);
|
||||
}
|
||||
|
||||
export async function createResearchJournalEntryRecord(input: {
|
||||
userId: string;
|
||||
ticker: string;
|
||||
accessionNumber?: string | null;
|
||||
entryType: ResearchJournalEntryType;
|
||||
title?: string | null;
|
||||
bodyMarkdown: string;
|
||||
metadata?: Record<string, unknown> | null;
|
||||
}) {
|
||||
const ticker = normalizeTicker(input.ticker);
|
||||
const bodyMarkdown = input.bodyMarkdown.trim();
|
||||
if (!ticker) {
|
||||
throw new Error('ticker is required');
|
||||
}
|
||||
|
||||
if (!bodyMarkdown) {
|
||||
throw new Error('bodyMarkdown is required');
|
||||
}
|
||||
|
||||
const now = new Date().toISOString();
|
||||
const [created] = await db
|
||||
.insert(researchJournalEntry)
|
||||
.values({
|
||||
user_id: input.userId,
|
||||
ticker,
|
||||
accession_number: normalizeAccessionNumber(input.accessionNumber),
|
||||
entry_type: input.entryType,
|
||||
title: normalizeTitle(input.title),
|
||||
body_markdown: bodyMarkdown,
|
||||
metadata: normalizeMetadata(input.metadata),
|
||||
created_at: now,
|
||||
updated_at: now
|
||||
})
|
||||
.returning();
|
||||
|
||||
return toResearchJournalEntry(created);
|
||||
}
|
||||
|
||||
export async function updateResearchJournalEntryRecord(input: {
|
||||
userId: string;
|
||||
id: number;
|
||||
title?: string | null;
|
||||
bodyMarkdown?: string;
|
||||
metadata?: Record<string, unknown> | null;
|
||||
}) {
|
||||
const [existing] = await db
|
||||
.select()
|
||||
.from(researchJournalEntry)
|
||||
.where(and(eq(researchJournalEntry.user_id, input.userId), eq(researchJournalEntry.id, input.id)))
|
||||
.limit(1);
|
||||
|
||||
if (!existing) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const nextBodyMarkdown = input.bodyMarkdown === undefined
|
||||
? existing.body_markdown
|
||||
: input.bodyMarkdown.trim();
|
||||
if (!nextBodyMarkdown) {
|
||||
throw new Error('bodyMarkdown is required');
|
||||
}
|
||||
|
||||
const [updated] = await db
|
||||
.update(researchJournalEntry)
|
||||
.set({
|
||||
title: input.title === undefined ? existing.title : normalizeTitle(input.title),
|
||||
body_markdown: nextBodyMarkdown,
|
||||
metadata: input.metadata === undefined ? existing.metadata ?? null : normalizeMetadata(input.metadata),
|
||||
updated_at: new Date().toISOString()
|
||||
})
|
||||
.where(and(eq(researchJournalEntry.user_id, input.userId), eq(researchJournalEntry.id, input.id)))
|
||||
.returning();
|
||||
|
||||
return updated ? toResearchJournalEntry(updated) : null;
|
||||
}
|
||||
|
||||
export async function deleteResearchJournalEntryRecord(userId: string, id: number) {
|
||||
const rows = await db
|
||||
.delete(researchJournalEntry)
|
||||
.where(and(eq(researchJournalEntry.user_id, userId), eq(researchJournalEntry.id, id)))
|
||||
.returning({ id: researchJournalEntry.id });
|
||||
|
||||
return rows.length > 0;
|
||||
}
|
||||
@@ -1,9 +1,15 @@
|
||||
import { and, desc, eq } from 'drizzle-orm';
|
||||
import type { WatchlistItem } from '@/lib/types';
|
||||
import type {
|
||||
CoveragePriority,
|
||||
CoverageStatus,
|
||||
WatchlistItem
|
||||
} from '@/lib/types';
|
||||
import { db } from '@/lib/server/db';
|
||||
import { watchlistItem } from '@/lib/server/db/schema';
|
||||
|
||||
type WatchlistRow = typeof watchlistItem.$inferSelect;
|
||||
const DEFAULT_STATUS: CoverageStatus = 'backlog';
|
||||
const DEFAULT_PRIORITY: CoveragePriority = 'medium';
|
||||
|
||||
function normalizeTags(tags?: string[]) {
|
||||
if (!Array.isArray(tags)) {
|
||||
@@ -32,7 +38,7 @@ function normalizeTags(tags?: string[]) {
|
||||
return [...unique];
|
||||
}
|
||||
|
||||
function toWatchlistItem(row: WatchlistRow): WatchlistItem {
|
||||
function toWatchlistItem(row: WatchlistRow, latestFilingDate: string | null = null): WatchlistItem {
|
||||
return {
|
||||
id: row.id,
|
||||
user_id: row.user_id,
|
||||
@@ -43,7 +49,12 @@ function toWatchlistItem(row: WatchlistRow): WatchlistItem {
|
||||
tags: Array.isArray(row.tags)
|
||||
? row.tags.filter((entry): entry is string => typeof entry === 'string')
|
||||
: [],
|
||||
created_at: row.created_at
|
||||
created_at: row.created_at,
|
||||
status: row.status ?? DEFAULT_STATUS,
|
||||
priority: row.priority ?? DEFAULT_PRIORITY,
|
||||
updated_at: row.updated_at || row.created_at,
|
||||
last_reviewed_at: row.last_reviewed_at ?? null,
|
||||
latest_filing_date: latestFilingDate
|
||||
};
|
||||
}
|
||||
|
||||
@@ -52,9 +63,19 @@ export async function listWatchlistItems(userId: string) {
|
||||
.select()
|
||||
.from(watchlistItem)
|
||||
.where(eq(watchlistItem.user_id, userId))
|
||||
.orderBy(desc(watchlistItem.created_at));
|
||||
.orderBy(desc(watchlistItem.updated_at), desc(watchlistItem.created_at));
|
||||
|
||||
return rows.map(toWatchlistItem);
|
||||
return rows.map((row) => toWatchlistItem(row));
|
||||
}
|
||||
|
||||
export async function getWatchlistItemById(userId: string, id: number) {
|
||||
const [row] = await db
|
||||
.select()
|
||||
.from(watchlistItem)
|
||||
.where(and(eq(watchlistItem.user_id, userId), eq(watchlistItem.id, id)))
|
||||
.limit(1);
|
||||
|
||||
return row ? toWatchlistItem(row) : null;
|
||||
}
|
||||
|
||||
export async function getWatchlistItemByTicker(userId: string, ticker: string) {
|
||||
@@ -79,11 +100,18 @@ export async function upsertWatchlistItemRecord(input: {
|
||||
sector?: string;
|
||||
category?: string;
|
||||
tags?: string[];
|
||||
status?: CoverageStatus;
|
||||
priority?: CoveragePriority;
|
||||
lastReviewedAt?: string | null;
|
||||
}) {
|
||||
const normalizedTicker = input.ticker.trim().toUpperCase();
|
||||
const normalizedSector = input.sector?.trim() ? input.sector.trim() : null;
|
||||
const normalizedCategory = input.category?.trim() ? input.category.trim() : null;
|
||||
const normalizedTags = normalizeTags(input.tags);
|
||||
const normalizedCompanyName = input.companyName.trim();
|
||||
const normalizedLastReviewedAt = input.lastReviewedAt?.trim()
|
||||
? input.lastReviewedAt.trim()
|
||||
: null;
|
||||
const now = new Date().toISOString();
|
||||
|
||||
const [inserted] = await db
|
||||
@@ -91,11 +119,15 @@ export async function upsertWatchlistItemRecord(input: {
|
||||
.values({
|
||||
user_id: input.userId,
|
||||
ticker: normalizedTicker,
|
||||
company_name: input.companyName,
|
||||
company_name: normalizedCompanyName,
|
||||
sector: normalizedSector,
|
||||
category: normalizedCategory,
|
||||
tags: normalizedTags,
|
||||
created_at: now
|
||||
status: input.status ?? DEFAULT_STATUS,
|
||||
priority: input.priority ?? DEFAULT_PRIORITY,
|
||||
created_at: now,
|
||||
updated_at: now,
|
||||
last_reviewed_at: normalizedLastReviewedAt
|
||||
})
|
||||
.onConflictDoNothing({
|
||||
target: [watchlistItem.user_id, watchlistItem.ticker],
|
||||
@@ -109,13 +141,23 @@ export async function upsertWatchlistItemRecord(input: {
|
||||
};
|
||||
}
|
||||
|
||||
const [existing] = await db
|
||||
.select()
|
||||
.from(watchlistItem)
|
||||
.where(and(eq(watchlistItem.user_id, input.userId), eq(watchlistItem.ticker, normalizedTicker)))
|
||||
.limit(1);
|
||||
|
||||
const [updated] = await db
|
||||
.update(watchlistItem)
|
||||
.set({
|
||||
company_name: input.companyName,
|
||||
company_name: normalizedCompanyName,
|
||||
sector: normalizedSector,
|
||||
category: normalizedCategory,
|
||||
tags: normalizedTags
|
||||
tags: normalizedTags,
|
||||
status: input.status ?? existing?.status ?? DEFAULT_STATUS,
|
||||
priority: input.priority ?? existing?.priority ?? DEFAULT_PRIORITY,
|
||||
updated_at: now,
|
||||
last_reviewed_at: normalizedLastReviewedAt ?? existing?.last_reviewed_at ?? null
|
||||
})
|
||||
.where(and(eq(watchlistItem.user_id, input.userId), eq(watchlistItem.ticker, normalizedTicker)))
|
||||
.returning();
|
||||
@@ -130,6 +172,93 @@ export async function upsertWatchlistItemRecord(input: {
|
||||
};
|
||||
}
|
||||
|
||||
export async function updateWatchlistItemRecord(input: {
|
||||
userId: string;
|
||||
id: number;
|
||||
companyName?: string;
|
||||
sector?: string;
|
||||
category?: string;
|
||||
tags?: string[];
|
||||
status?: CoverageStatus;
|
||||
priority?: CoveragePriority;
|
||||
lastReviewedAt?: string | null;
|
||||
}) {
|
||||
const [existing] = await db
|
||||
.select()
|
||||
.from(watchlistItem)
|
||||
.where(and(eq(watchlistItem.user_id, input.userId), eq(watchlistItem.id, input.id)))
|
||||
.limit(1);
|
||||
|
||||
if (!existing) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const nextCompanyName = input.companyName === undefined
|
||||
? existing.company_name
|
||||
: input.companyName.trim();
|
||||
if (!nextCompanyName) {
|
||||
throw new Error('companyName is required');
|
||||
}
|
||||
|
||||
const nextSector = input.sector === undefined
|
||||
? existing.sector
|
||||
: input.sector.trim()
|
||||
? input.sector.trim()
|
||||
: null;
|
||||
const nextCategory = input.category === undefined
|
||||
? existing.category
|
||||
: input.category.trim()
|
||||
? input.category.trim()
|
||||
: null;
|
||||
const nextTags = input.tags === undefined
|
||||
? existing.tags ?? null
|
||||
: normalizeTags(input.tags);
|
||||
const nextLastReviewedAt = input.lastReviewedAt === undefined
|
||||
? existing.last_reviewed_at
|
||||
: input.lastReviewedAt?.trim()
|
||||
? input.lastReviewedAt.trim()
|
||||
: null;
|
||||
|
||||
const [updated] = await db
|
||||
.update(watchlistItem)
|
||||
.set({
|
||||
company_name: nextCompanyName,
|
||||
sector: nextSector,
|
||||
category: nextCategory,
|
||||
tags: nextTags,
|
||||
status: input.status ?? existing.status ?? DEFAULT_STATUS,
|
||||
priority: input.priority ?? existing.priority ?? DEFAULT_PRIORITY,
|
||||
updated_at: new Date().toISOString(),
|
||||
last_reviewed_at: nextLastReviewedAt
|
||||
})
|
||||
.where(and(eq(watchlistItem.user_id, input.userId), eq(watchlistItem.id, input.id)))
|
||||
.returning();
|
||||
|
||||
return updated ? toWatchlistItem(updated) : null;
|
||||
}
|
||||
|
||||
export async function updateWatchlistReviewByTicker(
|
||||
userId: string,
|
||||
ticker: string,
|
||||
reviewedAt: string
|
||||
) {
|
||||
const normalizedTicker = ticker.trim().toUpperCase();
|
||||
if (!normalizedTicker) {
|
||||
return null;
|
||||
}
|
||||
|
||||
const [updated] = await db
|
||||
.update(watchlistItem)
|
||||
.set({
|
||||
last_reviewed_at: reviewedAt,
|
||||
updated_at: reviewedAt
|
||||
})
|
||||
.where(and(eq(watchlistItem.user_id, userId), eq(watchlistItem.ticker, normalizedTicker)))
|
||||
.returning();
|
||||
|
||||
return updated ? toWatchlistItem(updated) : null;
|
||||
}
|
||||
|
||||
export async function deleteWatchlistItemRecord(userId: string, id: number) {
|
||||
const removed = await db
|
||||
.delete(watchlistItem)
|
||||
|
||||
66
lib/types.ts
66
lib/types.ts
@@ -5,6 +5,10 @@ export type User = {
|
||||
image: string | null;
|
||||
};
|
||||
|
||||
// Lifecycle stage of a coverage (watchlist) item.
export type CoverageStatus = 'backlog' | 'active' | 'watch' | 'archive';
// Triage priority used for ordering coverage work.
export type CoveragePriority = 'low' | 'medium' | 'high';
// Kind of research-journal entry; presumably 'filing_note' links a note to a
// specific filing and 'status_change' records coverage transitions — confirm
// against the journal write paths.
export type ResearchJournalEntryType = 'note' | 'filing_note' | 'status_change';
|
||||
|
||||
export type WatchlistItem = {
|
||||
id: number;
|
||||
user_id: string;
|
||||
@@ -14,12 +18,18 @@ export type WatchlistItem = {
|
||||
category: string | null;
|
||||
tags: string[];
|
||||
created_at: string;
|
||||
status: CoverageStatus;
|
||||
priority: CoveragePriority;
|
||||
updated_at: string;
|
||||
last_reviewed_at: string | null;
|
||||
latest_filing_date: string | null;
|
||||
};
|
||||
|
||||
export type Holding = {
|
||||
id: number;
|
||||
user_id: string;
|
||||
ticker: string;
|
||||
company_name: string | null;
|
||||
shares: string;
|
||||
avg_cost: string;
|
||||
current_price: string | null;
|
||||
@@ -165,6 +175,19 @@ export type PortfolioInsight = {
|
||||
created_at: string;
|
||||
};
|
||||
|
||||
/** A research-journal row as returned by the API / data layer. */
export type ResearchJournalEntry = {
  id: number;
  user_id: string;
  ticker: string;
  // Null when the entry is not tied to a specific filing; presumably an
  // SEC accession number when set — confirm against the filings schema.
  accession_number: string | null;
  entry_type: ResearchJournalEntryType;
  title: string | null;
  // Required content: update paths reject empty markdown bodies.
  body_markdown: string;
  metadata: Record<string, unknown> | null;
  // ISO-8601 timestamp strings.
  created_at: string;
  updated_at: string;
};
|
||||
|
||||
export type CompanyFinancialPoint = {
|
||||
filingDate: string;
|
||||
filingType: Filing['filing_type'];
|
||||
@@ -332,6 +355,28 @@ export type CompanyFinancialStatementsResponse = {
|
||||
pendingFilings: number;
|
||||
queuedSync: boolean;
|
||||
};
|
||||
overviewMetrics: {
|
||||
referencePeriodId: string | null;
|
||||
referenceDate: string | null;
|
||||
latest: {
|
||||
revenue: number | null;
|
||||
netIncome: number | null;
|
||||
totalAssets: number | null;
|
||||
cash: number | null;
|
||||
debt: number | null;
|
||||
};
|
||||
series: Array<{
|
||||
periodId: string;
|
||||
filingDate: string;
|
||||
periodEnd: string | null;
|
||||
label: string;
|
||||
revenue: number | null;
|
||||
netIncome: number | null;
|
||||
totalAssets: number | null;
|
||||
cash: number | null;
|
||||
debt: number | null;
|
||||
}>;
|
||||
};
|
||||
metrics: {
|
||||
taxonomy: Filing['metrics'];
|
||||
validation: MetricValidationResult | null;
|
||||
@@ -371,6 +416,27 @@ export type CompanyAnalysis = {
|
||||
financials: CompanyFinancialPoint[];
|
||||
filings: Filing[];
|
||||
aiReports: CompanyAiReport[];
|
||||
coverage: WatchlistItem | null;
|
||||
journalPreview: ResearchJournalEntry[];
|
||||
recentAiReports: CompanyAiReport[];
|
||||
latestFilingSummary: {
|
||||
accessionNumber: string;
|
||||
filingDate: string;
|
||||
filingType: Filing['filing_type'];
|
||||
filingUrl: string | null;
|
||||
submissionUrl: string | null;
|
||||
summary: string | null;
|
||||
hasAnalysis: boolean;
|
||||
} | null;
|
||||
keyMetrics: {
|
||||
referenceDate: string | null;
|
||||
revenue: number | null;
|
||||
netIncome: number | null;
|
||||
totalAssets: number | null;
|
||||
cash: number | null;
|
||||
debt: number | null;
|
||||
netMargin: number | null;
|
||||
};
|
||||
};
|
||||
|
||||
export type NavGroup = 'overview' | 'research' | 'portfolio';
|
||||
|
||||
Reference in New Issue
Block a user