chore: commit all changes

This commit is contained in:
2026-02-26 13:26:18 -05:00
parent fd8edb1f21
commit 74fee52c4e
26 changed files with 4705 additions and 1108 deletions

View File

@@ -1,3 +1,5 @@
import { edenTreaty } from '@elysiajs/eden';
import type { App } from '@/lib/server/api/app';
import type {
Filing,
Holding,
@@ -11,6 +13,13 @@ import { resolveApiBaseURL } from './runtime-url';
const API_BASE = resolveApiBaseURL(process.env.NEXT_PUBLIC_API_URL);
const client = edenTreaty<App>(API_BASE, {
$fetch: {
credentials: 'include',
cache: 'no-store'
}
});
export class ApiError extends Error {
status: number;
@@ -21,56 +30,96 @@ export class ApiError extends Error {
}
}
async function apiFetch<T>(path: string, init?: RequestInit): Promise<T> {
const headers = new Headers(init?.headers);
if (!headers.has('Content-Type')) {
headers.set('Content-Type', 'application/json');
function extractErrorMessage(error: unknown, fallback: string) {
if (!error || typeof error !== 'object') {
return fallback;
}
const response = await fetch(`${API_BASE}${path}`, {
...init,
credentials: 'include',
headers,
cache: 'no-store'
});
const candidate = error as {
value?: unknown;
message?: string;
};
const body = await response.json().catch(() => ({}));
if (!response.ok) {
const message = typeof body?.error === 'string' ? body.error : `Request failed (${response.status})`;
throw new ApiError(message, response.status);
if (typeof candidate.message === 'string' && candidate.message.trim().length > 0) {
return candidate.message;
}
return body as T;
if (candidate.value && typeof candidate.value === 'object') {
const nested = candidate.value as { error?: unknown; message?: unknown };
if (typeof nested.error === 'string' && nested.error.trim().length > 0) {
return nested.error;
}
if (typeof nested.message === 'string' && nested.message.trim().length > 0) {
return nested.message;
}
}
if (typeof candidate.value === 'string' && candidate.value.trim().length > 0) {
return candidate.value;
}
return fallback;
}
// Minimal shape of an Eden Treaty call result that unwrapData consumes.
type TreatyResult = {
  data: unknown;
  error: unknown;
  status: number;
};

/**
 * Normalizes an Eden Treaty result into its typed payload.
 *
 * Throws ApiError (carrying the HTTP status) when the call reported an
 * error, when the body is missing, or when a Response body cannot be
 * parsed as JSON. The error message is extracted via extractErrorMessage
 * with `fallback` as the last resort.
 */
async function unwrapData<T>(result: TreatyResult, fallback: string) {
  const { data, error, status } = result;
  if (error) {
    throw new ApiError(extractErrorMessage(error, fallback), status);
  }
  if (data === null || data === undefined) {
    throw new ApiError(fallback, status);
  }
  let payload: unknown = data;
  if (data instanceof Response) {
    // Treaty may hand back a raw Response; a JSON parse failure is
    // collapsed to null and rejected below.
    payload = await data.json().catch(() => null);
  }
  if (payload === null || payload === undefined) {
    throw new ApiError(fallback, status);
  }
  return payload as T;
}
export async function getMe() {
return await apiFetch<{ user: User }>('/api/me');
const result = await client.api.me.get();
return await unwrapData<{ user: User }>(result, 'Unable to fetch session');
}
export async function listWatchlist() {
return await apiFetch<{ items: WatchlistItem[] }>('/api/watchlist');
const result = await client.api.watchlist.get();
return await unwrapData<{ items: WatchlistItem[] }>(result, 'Unable to fetch watchlist');
}
export async function upsertWatchlistItem(input: { ticker: string; companyName: string; sector?: string }) {
return await apiFetch<{ item: WatchlistItem }>('/api/watchlist', {
method: 'POST',
body: JSON.stringify(input)
});
const result = await client.api.watchlist.post(input);
return await unwrapData<{ item: WatchlistItem }>(result, 'Unable to save watchlist item');
}
export async function deleteWatchlistItem(id: number) {
return await apiFetch<{ success: boolean }>(`/api/watchlist/${id}`, {
method: 'DELETE'
});
const result = await client.api.watchlist[id].delete();
return await unwrapData<{ success: boolean }>(result, 'Unable to delete watchlist item');
}
export async function listHoldings() {
return await apiFetch<{ holdings: Holding[] }>('/api/portfolio/holdings');
const result = await client.api.portfolio.holdings.get();
return await unwrapData<{ holdings: Holding[] }>(result, 'Unable to fetch holdings');
}
export async function getPortfolioSummary() {
return await apiFetch<{ summary: PortfolioSummary }>('/api/portfolio/summary');
const result = await client.api.portfolio.summary.get();
return await unwrapData<{ summary: PortfolioSummary }>(result, 'Unable to fetch summary');
}
export async function upsertHolding(input: {
@@ -79,66 +128,62 @@ export async function upsertHolding(input: {
avgCost: number;
currentPrice?: number;
}) {
return await apiFetch<{ holding: Holding }>('/api/portfolio/holdings', {
method: 'POST',
body: JSON.stringify(input)
});
const result = await client.api.portfolio.holdings.post(input);
return await unwrapData<{ holding: Holding }>(result, 'Unable to save holding');
}
export async function deleteHolding(id: number) {
return await apiFetch<{ success: boolean }>(`/api/portfolio/holdings/${id}`, {
method: 'DELETE'
});
const result = await client.api.portfolio.holdings[id].delete();
return await unwrapData<{ success: boolean }>(result, 'Unable to delete holding');
}
export async function queuePriceRefresh() {
return await apiFetch<{ task: Task }>('/api/portfolio/refresh-prices', {
method: 'POST'
});
const result = await client.api.portfolio['refresh-prices'].post();
return await unwrapData<{ task: Task }>(result, 'Unable to queue price refresh');
}
export async function queuePortfolioInsights() {
return await apiFetch<{ task: Task }>('/api/portfolio/insights/generate', {
method: 'POST'
});
const result = await client.api.portfolio.insights.generate.post();
return await unwrapData<{ task: Task }>(result, 'Unable to queue portfolio insights');
}
export async function getLatestPortfolioInsight() {
return await apiFetch<{ insight: PortfolioInsight | null }>('/api/portfolio/insights/latest');
const result = await client.api.portfolio.insights.latest.get();
return await unwrapData<{ insight: PortfolioInsight | null }>(result, 'Unable to fetch latest insight');
}
export async function listFilings(query?: { ticker?: string; limit?: number }) {
const params = new URLSearchParams();
const result = await client.api.filings.get({
$query: {
ticker: query?.ticker,
limit: query?.limit
}
});
if (query?.ticker) {
params.set('ticker', query.ticker);
}
if (query?.limit) {
params.set('limit', String(query.limit));
}
const suffix = params.size > 0 ? `?${params.toString()}` : '';
return await apiFetch<{ filings: Filing[] }>(`/api/filings${suffix}`);
return await unwrapData<{ filings: Filing[] }>(result, 'Unable to fetch filings');
}
export async function queueFilingSync(input: { ticker: string; limit?: number }) {
return await apiFetch<{ task: Task }>('/api/filings/sync', {
method: 'POST',
body: JSON.stringify(input)
});
const result = await client.api.filings.sync.post(input);
return await unwrapData<{ task: Task }>(result, 'Unable to queue filing sync');
}
export async function queueFilingAnalysis(accessionNumber: string) {
return await apiFetch<{ task: Task }>(`/api/filings/${accessionNumber}/analyze`, {
method: 'POST'
});
const result = await client.api.filings[accessionNumber].analyze.post();
return await unwrapData<{ task: Task }>(result, 'Unable to queue filing analysis');
}
export async function getTask(taskId: string) {
return await apiFetch<{ task: Task }>(`/api/tasks/${taskId}`);
const result = await client.api.tasks[taskId].get();
return await unwrapData<{ task: Task }>(result, 'Unable to fetch task');
}
export async function listRecentTasks(limit = 20) {
return await apiFetch<{ tasks: Task[] }>(`/api/tasks?limit=${limit}`);
const result = await client.api.tasks.get({
$query: {
limit
}
});
return await unwrapData<{ tasks: Task[] }>(result, 'Unable to fetch tasks');
}

458
lib/server/api/app.ts Normal file
View File

@@ -0,0 +1,458 @@
import { Elysia, t } from 'elysia';
import type { TaskStatus } from '@/lib/types';
import { auth } from '@/lib/auth';
import { requireAuthenticatedSession } from '@/lib/server/auth-session';
import { asErrorMessage, jsonError } from '@/lib/server/http';
import { buildPortfolioSummary } from '@/lib/server/portfolio';
import { listFilingsRecords } from '@/lib/server/repos/filings';
import {
deleteHoldingByIdRecord,
listUserHoldings,
updateHoldingByIdRecord,
upsertHoldingRecord
} from '@/lib/server/repos/holdings';
import { getLatestPortfolioInsight } from '@/lib/server/repos/insights';
import {
deleteWatchlistItemRecord,
listWatchlistItems,
upsertWatchlistItemRecord
} from '@/lib/server/repos/watchlist';
import {
enqueueTask,
getTaskById,
getTaskQueueSnapshot,
listRecentTasks
} from '@/lib/server/tasks';
// Task states accepted as status filters on GET /tasks.
const ALLOWED_STATUSES: TaskStatus[] = ['queued', 'running', 'completed', 'failed'];

/**
 * Coerces an unknown request payload into a plain string-keyed record.
 * null, primitives and arrays all collapse to an empty object.
 */
function asRecord(value: unknown): Record<string, unknown> {
  const isPlainObject = value !== null && typeof value === 'object' && !Array.isArray(value);
  return isPlainObject ? (value as Record<string, unknown>) : {};
}

/**
 * Parses an unknown value as a strictly positive finite number.
 * Returns null for zero, negatives, NaN, Infinity and unparseable input.
 */
function asPositiveNumber(value: unknown) {
  const numeric = typeof value === 'number' ? value : Number(value);
  if (!Number.isFinite(numeric) || numeric <= 0) {
    return null;
  }
  return numeric;
}
// Forwards auth routes straight to better-auth's own request handler.
const authHandler = ({ request }: { request: Request }) => auth.handler(request);

/**
 * The Elysia HTTP API mounted under /api.
 *
 * Every route except /auth* and /health resolves the caller's session via
 * requireAuthenticatedSession(); when that guard returns a `response` the
 * request is rejected by returning it unchanged. The `t.Object` schemas
 * validate inputs before the handler runs; handlers still re-check
 * defensively via asRecord/asPositiveNumber.
 * NOTE(review): jsonError's default status is not visible here — presumably
 * 400; confirm in lib/server/http.
 */
export const app = new Elysia({ prefix: '/api' })
  .all('/auth', authHandler)
  .all('/auth/*', authHandler)
  // Unauthenticated liveness probe; also reports a task-queue snapshot.
  .get('/health', async () => {
    const queue = await getTaskQueueSnapshot();
    return Response.json({
      status: 'ok',
      version: '4.0.0',
      timestamp: new Date().toISOString(),
      queue
    });
  })
  // Returns the authenticated user's public profile fields only.
  .get('/me', async () => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    return Response.json({
      user: {
        id: session.user.id,
        email: session.user.email,
        name: session.user.name,
        image: session.user.image
      }
    });
  })
  .get('/watchlist', async () => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    const items = await listWatchlistItems(session.user.id);
    return Response.json({ items });
  })
  // Creates or updates a watchlist entry (keyed per user + ticker).
  .post('/watchlist', async ({ body }) => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    const payload = asRecord(body);
    // Tickers are normalized to upper case so "aapl" and "AAPL" collide
    // on the per-user unique key.
    const ticker = typeof payload.ticker === 'string' ? payload.ticker.trim().toUpperCase() : '';
    const companyName = typeof payload.companyName === 'string' ? payload.companyName.trim() : '';
    const sector = typeof payload.sector === 'string' ? payload.sector.trim() : '';
    if (!ticker) {
      return jsonError('ticker is required');
    }
    if (!companyName) {
      return jsonError('companyName is required');
    }
    try {
      const item = await upsertWatchlistItemRecord({
        userId: session.user.id,
        ticker,
        companyName,
        sector
      });
      return Response.json({ item });
    } catch (error) {
      return jsonError(asErrorMessage(error, 'Failed to create watchlist item'));
    }
  }, {
    body: t.Object({
      ticker: t.String({ minLength: 1 }),
      companyName: t.String({ minLength: 1 }),
      sector: t.Optional(t.String())
    })
  })
  .delete('/watchlist/:id', async ({ params }) => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    // Path params arrive as strings; only positive integer ids are valid.
    const numericId = Number(params.id);
    if (!Number.isInteger(numericId) || numericId <= 0) {
      return jsonError('Invalid watchlist id', 400);
    }
    const removed = await deleteWatchlistItemRecord(session.user.id, numericId);
    if (!removed) {
      return jsonError('Watchlist item not found', 404);
    }
    return Response.json({ success: true });
  }, {
    params: t.Object({
      id: t.String({ minLength: 1 })
    })
  })
  .get('/portfolio/holdings', async () => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    const holdings = await listUserHoldings(session.user.id);
    return Response.json({ holdings });
  })
  // Creates or updates a holding; currentPrice defaults to avgCost when
  // omitted so derived columns can be computed immediately.
  .post('/portfolio/holdings', async ({ body }) => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    const payload = asRecord(body);
    const ticker = typeof payload.ticker === 'string' ? payload.ticker.trim().toUpperCase() : '';
    const shares = asPositiveNumber(payload.shares);
    const avgCost = asPositiveNumber(payload.avgCost);
    if (!ticker) {
      return jsonError('ticker is required');
    }
    if (shares === null) {
      return jsonError('shares must be a positive number');
    }
    if (avgCost === null) {
      return jsonError('avgCost must be a positive number');
    }
    try {
      const currentPrice = asPositiveNumber(payload.currentPrice) ?? avgCost;
      const holding = await upsertHoldingRecord({
        userId: session.user.id,
        ticker,
        shares,
        avgCost,
        currentPrice
      });
      return Response.json({ holding });
    } catch (error) {
      return jsonError(asErrorMessage(error, 'Failed to save holding'));
    }
  }, {
    body: t.Object({
      ticker: t.String({ minLength: 1 }),
      shares: t.Number({ exclusiveMinimum: 0 }),
      avgCost: t.Number({ exclusiveMinimum: 0 }),
      currentPrice: t.Optional(t.Number({ exclusiveMinimum: 0 }))
    })
  })
  // Partial update: absent or non-positive fields are passed as undefined
  // and left unchanged by the repo layer.
  .patch('/portfolio/holdings/:id', async ({ params, body }) => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    const numericId = Number(params.id);
    if (!Number.isInteger(numericId) || numericId <= 0) {
      return jsonError('Invalid holding id');
    }
    const payload = asRecord(body);
    const updated = await updateHoldingByIdRecord({
      userId: session.user.id,
      id: numericId,
      shares: asPositiveNumber(payload.shares) ?? undefined,
      avgCost: asPositiveNumber(payload.avgCost) ?? undefined,
      currentPrice: asPositiveNumber(payload.currentPrice) ?? undefined
    });
    if (!updated) {
      return jsonError('Holding not found', 404);
    }
    return Response.json({ holding: updated });
  }, {
    params: t.Object({
      id: t.String({ minLength: 1 })
    }),
    body: t.Object({
      shares: t.Optional(t.Number({ exclusiveMinimum: 0 })),
      avgCost: t.Optional(t.Number({ exclusiveMinimum: 0 })),
      currentPrice: t.Optional(t.Number({ exclusiveMinimum: 0 }))
    })
  })
  .delete('/portfolio/holdings/:id', async ({ params }) => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    const numericId = Number(params.id);
    if (!Number.isInteger(numericId) || numericId <= 0) {
      return jsonError('Invalid holding id');
    }
    const removed = await deleteHoldingByIdRecord(session.user.id, numericId);
    if (!removed) {
      return jsonError('Holding not found', 404);
    }
    return Response.json({ success: true });
  }, {
    params: t.Object({
      id: t.String({ minLength: 1 })
    })
  })
  // Summary is derived from holdings on every request (no caching here).
  .get('/portfolio/summary', async () => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    const holdings = await listUserHoldings(session.user.id);
    const summary = buildPortfolioSummary(holdings);
    return Response.json({ summary });
  })
  // Enqueues a background price-refresh task for this user.
  .post('/portfolio/refresh-prices', async () => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    try {
      const task = await enqueueTask({
        userId: session.user.id,
        taskType: 'refresh_prices',
        payload: {},
        priority: 80
      });
      return Response.json({ task });
    } catch (error) {
      return jsonError(asErrorMessage(error, 'Failed to queue refresh task'));
    }
  })
  .post('/portfolio/insights/generate', async () => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    try {
      const task = await enqueueTask({
        userId: session.user.id,
        taskType: 'portfolio_insights',
        payload: {},
        priority: 70
      });
      return Response.json({ task });
    } catch (error) {
      return jsonError(asErrorMessage(error, 'Failed to queue insights task'));
    }
  })
  // Latest generated insight, or null when none exists yet.
  .get('/portfolio/insights/latest', async () => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    const insight = await getLatestPortfolioInsight(session.user.id);
    return Response.json({ insight });
  })
  // Filings are global (not per-user); auth is still required to read them.
  .get('/filings', async ({ query }) => {
    const { response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    const tickerFilter = typeof query.ticker === 'string'
      ? query.ticker.trim().toUpperCase()
      : undefined;
    // t.Numeric may deliver a number or a string depending on transport;
    // a missing/unparseable limit becomes NaN and falls back to 50 below.
    const limit = typeof query.limit === 'number'
      ? query.limit
      : Number(query.limit);
    const filings = await listFilingsRecords({
      ticker: tickerFilter,
      limit: Number.isFinite(limit) ? limit : 50
    });
    return Response.json({ filings });
  }, {
    query: t.Object({
      ticker: t.Optional(t.String()),
      limit: t.Optional(t.Numeric())
    })
  })
  // Enqueues a SEC filing sync for one ticker; limit defaults to 20.
  .post('/filings/sync', async ({ body }) => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    const payload = asRecord(body);
    const ticker = typeof payload.ticker === 'string' ? payload.ticker.trim().toUpperCase() : '';
    if (!ticker) {
      return jsonError('ticker is required');
    }
    try {
      const limit = typeof payload.limit === 'number' ? payload.limit : Number(payload.limit);
      const task = await enqueueTask({
        userId: session.user.id,
        taskType: 'sync_filings',
        payload: {
          ticker,
          limit: Number.isFinite(limit) ? limit : 20
        },
        priority: 90
      });
      return Response.json({ task });
    } catch (error) {
      return jsonError(asErrorMessage(error, 'Failed to queue filings sync task'));
    }
  }, {
    body: t.Object({
      ticker: t.String({ minLength: 1 }),
      limit: t.Optional(t.Numeric())
    })
  })
  // Enqueues LLM analysis of one filing, addressed by accession number.
  .post('/filings/:accessionNumber/analyze', async ({ params }) => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    const accessionNumber = params.accessionNumber?.trim() ?? '';
    // Mirrors the minLength: 4 schema below after trimming whitespace.
    if (accessionNumber.length < 4) {
      return jsonError('Invalid accession number');
    }
    try {
      const task = await enqueueTask({
        userId: session.user.id,
        taskType: 'analyze_filing',
        payload: { accessionNumber },
        priority: 65
      });
      return Response.json({ task });
    } catch (error) {
      return jsonError(asErrorMessage(error, 'Failed to queue filing analysis task'));
    }
  }, {
    params: t.Object({
      accessionNumber: t.String({ minLength: 4 })
    })
  })
  // Lists this user's recent tasks, optionally filtered by status; the
  // status filter accepts a single value or a repeated query param.
  .get('/tasks', async ({ query }) => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    const limit = typeof query.limit === 'number'
      ? query.limit
      : Number(query.limit ?? 20);
    const statusInput = query.status;
    const rawStatuses = Array.isArray(statusInput)
      ? statusInput
      : statusInput
        ? [statusInput]
        : [];
    // Silently drops unknown status strings rather than erroring.
    const statuses = rawStatuses.filter((status): status is TaskStatus => {
      return ALLOWED_STATUSES.includes(status as TaskStatus);
    });
    const tasks = await listRecentTasks(
      session.user.id,
      Number.isFinite(limit) ? limit : 20,
      statuses.length > 0 ? statuses : undefined
    );
    return Response.json({ tasks });
  }, {
    query: t.Object({
      limit: t.Optional(t.Numeric()),
      status: t.Optional(t.Union([t.String(), t.Array(t.String())]))
    })
  })
  // Fetches one task, scoped to the requesting user's id.
  .get('/tasks/:taskId', async ({ params }) => {
    const { session, response } = await requireAuthenticatedSession();
    if (response) {
      return response;
    }
    const task = await getTaskById(params.taskId, session.user.id);
    if (!task) {
      return jsonError('Task not found', 404);
    }
    return Response.json({ task });
  }, {
    params: t.Object({
      taskId: t.String({ minLength: 1 })
    })
  });

// Exported for the Eden Treaty client's end-to-end typing.
export type App = typeof app;

View File

@@ -1,14 +1,14 @@
import { drizzle } from 'drizzle-orm/node-postgres';
import { Pool } from 'pg';
import { authSchema } from './schema';
import { schema } from './schema';
type AuthDrizzleDb = ReturnType<typeof createDb>;
type AppDrizzleDb = ReturnType<typeof createDb>;
declare global {
// eslint-disable-next-line no-var
var __fiscalAuthPgPool: Pool | undefined;
var __fiscalPgPool: Pool | undefined;
// eslint-disable-next-line no-var
var __fiscalAuthDrizzleDb: AuthDrizzleDb | undefined;
var __fiscalDrizzleDb: AppDrizzleDb | undefined;
}
function getConnectionString() {
@@ -21,21 +21,21 @@ function getConnectionString() {
}
export function getPool() {
if (!globalThis.__fiscalAuthPgPool) {
globalThis.__fiscalAuthPgPool = new Pool({
if (!globalThis.__fiscalPgPool) {
globalThis.__fiscalPgPool = new Pool({
connectionString: getConnectionString()
});
}
return globalThis.__fiscalAuthPgPool;
return globalThis.__fiscalPgPool;
}
function createDb() {
return drizzle(getPool(), { schema: authSchema });
return drizzle(getPool(), { schema });
}
export const db = globalThis.__fiscalAuthDrizzleDb ?? createDb();
export const db = globalThis.__fiscalDrizzleDb ?? createDb();
if (!globalThis.__fiscalAuthDrizzleDb) {
globalThis.__fiscalAuthDrizzleDb = db;
if (!globalThis.__fiscalDrizzleDb) {
globalThis.__fiscalDrizzleDb = db;
}

View File

@@ -1,22 +1,52 @@
import { boolean, index, pgTable, text, timestamp, uniqueIndex } from 'drizzle-orm/pg-core';
import {
boolean,
index,
integer,
jsonb,
numeric,
pgTable,
text,
timestamp,
uniqueIndex
} from 'drizzle-orm/pg-core';
const dateColumn = {
// Headline financials extracted from a filing; every value may be null
// when extraction did not produce that figure.
type FilingMetrics = {
  revenue: number | null;
  netIncome: number | null;
  totalAssets: number | null;
  cash: number | null;
  debt: number | null;
};

// Stored analysis payload for a filing.
// NOTE(review): field meanings inferred from names (provider/model of the
// LLM, generated text, pre-migration insights) — confirm against the
// analysis writer.
type FilingAnalysis = {
  provider?: string;
  model?: string;
  text?: string;
  legacyInsights?: string;
};

// better-auth tables exchange timestamps as JS Date objects.
const authDateColumn = {
  withTimezone: true,
  mode: 'date'
} as const;

// App tables read/write timestamps as ISO strings.
const appDateColumn = {
  withTimezone: true,
  mode: 'string'
} as const;
export const user = pgTable('user', {
id: text('id').primaryKey().notNull(),
name: text('name').notNull(),
email: text('email').notNull(),
emailVerified: boolean('emailVerified').notNull().default(false),
image: text('image'),
createdAt: timestamp('createdAt', dateColumn).notNull(),
updatedAt: timestamp('updatedAt', dateColumn).notNull(),
createdAt: timestamp('createdAt', authDateColumn).notNull(),
updatedAt: timestamp('updatedAt', authDateColumn).notNull(),
role: text('role'),
banned: boolean('banned').default(false),
banReason: text('banReason'),
banExpires: timestamp('banExpires', dateColumn)
banExpires: timestamp('banExpires', authDateColumn)
}, (table) => ({
userEmailUnique: uniqueIndex('user_email_uidx').on(table.email)
}));
@@ -26,7 +56,7 @@ export const organization = pgTable('organization', {
name: text('name').notNull(),
slug: text('slug').notNull(),
logo: text('logo'),
createdAt: timestamp('createdAt', dateColumn).notNull(),
createdAt: timestamp('createdAt', authDateColumn).notNull(),
metadata: text('metadata')
}, (table) => ({
organizationSlugUnique: uniqueIndex('organization_slug_uidx').on(table.slug)
@@ -34,10 +64,10 @@ export const organization = pgTable('organization', {
export const session = pgTable('session', {
id: text('id').primaryKey().notNull(),
expiresAt: timestamp('expiresAt', dateColumn).notNull(),
expiresAt: timestamp('expiresAt', authDateColumn).notNull(),
token: text('token').notNull(),
createdAt: timestamp('createdAt', dateColumn).notNull(),
updatedAt: timestamp('updatedAt', dateColumn).notNull(),
createdAt: timestamp('createdAt', authDateColumn).notNull(),
updatedAt: timestamp('updatedAt', authDateColumn).notNull(),
ipAddress: text('ipAddress'),
userAgent: text('userAgent'),
userId: text('userId').notNull().references(() => user.id, { onDelete: 'cascade' }),
@@ -56,12 +86,12 @@ export const account = pgTable('account', {
accessToken: text('accessToken'),
refreshToken: text('refreshToken'),
idToken: text('idToken'),
accessTokenExpiresAt: timestamp('accessTokenExpiresAt', dateColumn),
refreshTokenExpiresAt: timestamp('refreshTokenExpiresAt', dateColumn),
accessTokenExpiresAt: timestamp('accessTokenExpiresAt', authDateColumn),
refreshTokenExpiresAt: timestamp('refreshTokenExpiresAt', authDateColumn),
scope: text('scope'),
password: text('password'),
createdAt: timestamp('createdAt', dateColumn).notNull(),
updatedAt: timestamp('updatedAt', dateColumn).notNull()
createdAt: timestamp('createdAt', authDateColumn).notNull(),
updatedAt: timestamp('updatedAt', authDateColumn).notNull()
}, (table) => ({
accountUserIdIndex: index('account_userId_idx').on(table.userId)
}));
@@ -70,9 +100,9 @@ export const verification = pgTable('verification', {
id: text('id').primaryKey().notNull(),
identifier: text('identifier').notNull(),
value: text('value').notNull(),
expiresAt: timestamp('expiresAt', dateColumn).notNull(),
createdAt: timestamp('createdAt', dateColumn).notNull(),
updatedAt: timestamp('updatedAt', dateColumn).notNull()
expiresAt: timestamp('expiresAt', authDateColumn).notNull(),
createdAt: timestamp('createdAt', authDateColumn).notNull(),
updatedAt: timestamp('updatedAt', authDateColumn).notNull()
}, (table) => ({
verificationIdentifierIndex: index('verification_identifier_idx').on(table.identifier)
}));
@@ -82,7 +112,7 @@ export const member = pgTable('member', {
organizationId: text('organizationId').notNull().references(() => organization.id, { onDelete: 'cascade' }),
userId: text('userId').notNull().references(() => user.id, { onDelete: 'cascade' }),
role: text('role').notNull().default('member'),
createdAt: timestamp('createdAt', dateColumn).notNull()
createdAt: timestamp('createdAt', authDateColumn).notNull()
}, (table) => ({
memberOrganizationIdIndex: index('member_organizationId_idx').on(table.organizationId),
memberUserIdIndex: index('member_userId_idx').on(table.userId)
@@ -94,14 +124,109 @@ export const invitation = pgTable('invitation', {
email: text('email').notNull(),
role: text('role'),
status: text('status').notNull().default('pending'),
expiresAt: timestamp('expiresAt', dateColumn).notNull(),
createdAt: timestamp('createdAt', dateColumn).notNull(),
expiresAt: timestamp('expiresAt', authDateColumn).notNull(),
createdAt: timestamp('createdAt', authDateColumn).notNull(),
inviterId: text('inviterId').notNull().references(() => user.id, { onDelete: 'cascade' })
}, (table) => ({
invitationOrganizationIdIndex: index('invitation_organizationId_idx').on(table.organizationId),
invitationEmailIndex: index('invitation_email_idx').on(table.email)
}));
// Per-user watchlist entries; at most one row per (user, ticker).
export const watchlistItem = pgTable('watchlist_item', {
  id: integer('id').generatedAlwaysAsIdentity().primaryKey(),
  user_id: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }),
  ticker: text('ticker').notNull(),
  company_name: text('company_name').notNull(),
  sector: text('sector'),
  created_at: timestamp('created_at', appDateColumn).notNull()
}, (table) => ({
  // Backs upsert-by-ticker semantics for a user's watchlist.
  watchlistUserTickerUnique: uniqueIndex('watchlist_user_ticker_uidx').on(table.user_id, table.ticker),
  watchlistUserCreatedIndex: index('watchlist_user_created_idx').on(table.user_id, table.created_at)
}));
// Portfolio holdings; one row per (user, ticker). Quantities/prices use
// numeric(30,6), monetary aggregates numeric(30,2).
export const holding = pgTable('holding', {
  id: integer('id').generatedAlwaysAsIdentity().primaryKey(),
  user_id: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }),
  ticker: text('ticker').notNull(),
  shares: numeric('shares', { precision: 30, scale: 6 }).notNull(),
  avg_cost: numeric('avg_cost', { precision: 30, scale: 6 }).notNull(),
  current_price: numeric('current_price', { precision: 30, scale: 6 }),
  // Derived columns — presumably precomputed at write time by the repo
  // layer; confirm in lib/server/repos/holdings.
  market_value: numeric('market_value', { precision: 30, scale: 2 }).notNull(),
  gain_loss: numeric('gain_loss', { precision: 30, scale: 2 }).notNull(),
  gain_loss_pct: numeric('gain_loss_pct', { precision: 30, scale: 2 }).notNull(),
  // When the price was last refreshed; null until a refresh has run.
  last_price_at: timestamp('last_price_at', appDateColumn),
  created_at: timestamp('created_at', appDateColumn).notNull(),
  updated_at: timestamp('updated_at', appDateColumn).notNull()
}, (table) => ({
  holdingUserTickerUnique: uniqueIndex('holding_user_ticker_uidx').on(table.user_id, table.ticker),
  holdingUserIndex: index('holding_user_idx').on(table.user_id)
}));
// SEC filings, global (not per-user), deduplicated by accession number.
export const filing = pgTable('filing', {
  id: integer('id').generatedAlwaysAsIdentity().primaryKey(),
  ticker: text('ticker').notNull(),
  filing_type: text('filing_type').$type<'10-K' | '10-Q' | '8-K'>().notNull(),
  // Stored as text rather than a timestamp column.
  filing_date: text('filing_date').notNull(),
  accession_number: text('accession_number').notNull(),
  cik: text('cik').notNull(),
  company_name: text('company_name').notNull(),
  filing_url: text('filing_url'),
  submission_url: text('submission_url'),
  primary_document: text('primary_document'),
  // Extracted metrics and LLM analysis; null until populated.
  metrics: jsonb('metrics').$type<FilingMetrics | null>(),
  analysis: jsonb('analysis').$type<FilingAnalysis | null>(),
  created_at: timestamp('created_at', appDateColumn).notNull(),
  updated_at: timestamp('updated_at', appDateColumn).notNull()
}, (table) => ({
  // Upsert target: one row per accession number.
  filingAccessionUnique: uniqueIndex('filing_accession_uidx').on(table.accession_number),
  filingTickerDateIndex: index('filing_ticker_date_idx').on(table.ticker, table.filing_date),
  filingDateIndex: index('filing_date_idx').on(table.filing_date)
}));
// Related URLs attached to a filing (documents, indexes, etc.).
export const filingLink = pgTable('filing_link', {
  id: integer('id').generatedAlwaysAsIdentity().primaryKey(),
  filing_id: integer('filing_id').notNull().references(() => filing.id, { onDelete: 'cascade' }),
  link_type: text('link_type').notNull(),
  url: text('url').notNull(),
  source: text('source').notNull().default('sec'),
  created_at: timestamp('created_at', appDateColumn).notNull()
}, (table) => ({
  // One row per (filing, url); inserts rely on this for conflict-skip.
  filingLinkUnique: uniqueIndex('filing_link_unique_uidx').on(table.filing_id, table.url),
  filingLinkFilingIndex: index('filing_link_filing_idx').on(table.filing_id)
}));
// Background task queue rows; ids are app-generated text (not identity).
export const taskRun = pgTable('task_run', {
  id: text('id').primaryKey().notNull(),
  user_id: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }),
  task_type: text('task_type').$type<'sync_filings' | 'refresh_prices' | 'analyze_filing' | 'portfolio_insights'>().notNull(),
  status: text('status').$type<'queued' | 'running' | 'completed' | 'failed'>().notNull(),
  priority: integer('priority').notNull(),
  payload: jsonb('payload').$type<Record<string, unknown>>().notNull(),
  result: jsonb('result').$type<Record<string, unknown> | null>(),
  error: text('error'),
  attempts: integer('attempts').notNull(),
  max_attempts: integer('max_attempts').notNull(),
  // External workflow correlation id; nullable, so the unique index below
  // still admits many rows without one (Postgres unique ignores NULLs).
  workflow_run_id: text('workflow_run_id'),
  created_at: timestamp('created_at', appDateColumn).notNull(),
  updated_at: timestamp('updated_at', appDateColumn).notNull(),
  finished_at: timestamp('finished_at', appDateColumn)
}, (table) => ({
  taskUserCreatedIndex: index('task_user_created_idx').on(table.user_id, table.created_at),
  taskStatusIndex: index('task_status_idx').on(table.status),
  taskWorkflowRunUnique: uniqueIndex('task_workflow_run_uidx').on(table.workflow_run_id)
}));
// LLM-generated portfolio insights; append-only per user, newest read via
// the (user, created_at) index.
export const portfolioInsight = pgTable('portfolio_insight', {
  id: integer('id').generatedAlwaysAsIdentity().primaryKey(),
  user_id: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }),
  provider: text('provider').notNull(),
  model: text('model').notNull(),
  content: text('content').notNull(),
  created_at: timestamp('created_at', appDateColumn).notNull()
}, (table) => ({
  insightUserCreatedIndex: index('insight_user_created_idx').on(table.user_id, table.created_at)
}));
export const authSchema = {
user,
session,
@@ -111,3 +236,17 @@ export const authSchema = {
member,
invitation
};
// App-domain tables (everything except the better-auth tables).
export const appSchema = {
  watchlistItem,
  holding,
  filing,
  filingLink,
  taskRun,
  portfolioInsight
};

// Combined schema handed to drizzle(): auth tables plus app tables.
export const schema = {
  ...authSchema,
  ...appSchema
};

172
lib/server/repos/filings.ts Normal file
View File

@@ -0,0 +1,172 @@
import { desc, eq } from 'drizzle-orm';
import type { Filing } from '@/lib/types';
import { db } from '@/lib/server/db';
import { filing, filingLink } from '@/lib/server/db/schema';
// Row shape as selected from the `filing` table.
type FilingRow = typeof filing.$inferSelect;

// A related link to persist alongside a filing.
type FilingLinkInput = {
  link_type: string;
  url: string;
};

// Input for upsertFilingsRecords; mirrors the `filing` columns plus the
// related links. Nullable URL fields are explicit (no optionals) so
// callers must decide null vs value.
type UpsertFilingInput = {
  ticker: string;
  filing_type: Filing['filing_type'];
  filing_date: string;
  accession_number: string;
  cik: string;
  company_name: string;
  filing_url: string | null;
  submission_url: string | null;
  primary_document: string | null;
  metrics: Filing['metrics'];
  links: FilingLinkInput[];
};
/**
 * Maps a raw `filing` table row to the public Filing shape, collapsing
 * absent jsonb fields (metrics/analysis) to explicit nulls.
 */
function toFiling(row: FilingRow): Filing {
  const {
    id,
    ticker,
    filing_type,
    filing_date,
    accession_number,
    cik,
    company_name,
    filing_url,
    submission_url,
    primary_document,
    created_at,
    updated_at
  } = row;
  return {
    id,
    ticker,
    filing_type,
    filing_date,
    accession_number,
    cik,
    company_name,
    filing_url,
    submission_url,
    primary_document,
    metrics: row.metrics ?? null,
    analysis: row.analysis ?? null,
    created_at,
    updated_at
  };
}
/**
 * Deduplicates filing links on (link_type, trimmed url), dropping entries
 * whose url is empty after trimming. When the same key appears more than
 * once, the last occurrence wins (Map.set overwrite).
 */
function dedupeLinks(links: FilingLinkInput[]) {
  const byKey = new Map<string, FilingLinkInput>();
  links.forEach((link) => {
    const trimmedUrl = link.url.trim();
    if (trimmedUrl.length === 0) {
      return;
    }
    byKey.set(`${link.link_type}::${trimmedUrl}`, { ...link, url: trimmedUrl });
  });
  return Array.from(byKey.values());
}
/**
 * Lists filings ordered newest-first (filing_date, then updated_at),
 * optionally filtered to one ticker.
 *
 * @param query.ticker - exact ticker to match (callers upper-case it)
 * @param query.limit  - requested page size; truncated to an integer and
 *                       clamped to [1, 250], defaulting to 50 when missing
 *                       or not a finite number
 */
export async function listFilingsRecords(query?: { ticker?: string; limit?: number }) {
  // Guard NaN/Infinity before clamping: Math.min/Math.max propagate NaN,
  // which would otherwise reach the driver as .limit(NaN).
  const requested = Math.trunc(query?.limit ?? 50);
  const safeLimit = Number.isFinite(requested)
    ? Math.min(Math.max(requested, 1), 250)
    : 50;
  const rows = query?.ticker
    ? await db
        .select()
        .from(filing)
        .where(eq(filing.ticker, query.ticker))
        .orderBy(desc(filing.filing_date), desc(filing.updated_at))
        .limit(safeLimit)
    : await db
        .select()
        .from(filing)
        .orderBy(desc(filing.filing_date), desc(filing.updated_at))
        .limit(safeLimit);
  return rows.map(toFiling);
}
/** Fetch a single filing by SEC accession number, or null when absent. */
export async function getFilingByAccession(accessionNumber: string) {
  const [row] = await db
    .select()
    .from(filing)
    .where(eq(filing.accession_number, accessionNumber))
    .limit(1);
  return row ? toFiling(row) : null;
}
/**
 * Insert or refresh a batch of filings keyed by accession number.
 *
 * Existing rows keep their original `created_at` and any stored `analysis`;
 * every other column is overwritten with incoming data. Related links are
 * deduped and inserted with conflicts ignored.
 *
 * NOTE(review): one SELECT plus one upsert per item (N+1) — fine for small
 * sync batches, revisit if batch sizes grow.
 *
 * @returns Counts of newly inserted vs. updated filings.
 */
export async function upsertFilingsRecords(items: UpsertFilingInput[]) {
  let inserted = 0;
  let updated = 0;
  for (const item of items) {
    const now = new Date().toISOString();
    // Pre-read so created_at/analysis can be preserved and the insert vs
    // update outcome counted.
    const existing = await getFilingByAccession(item.accession_number);
    const [saved] = await db
      .insert(filing)
      .values({
        ticker: item.ticker,
        filing_type: item.filing_type,
        filing_date: item.filing_date,
        accession_number: item.accession_number,
        cik: item.cik,
        company_name: item.company_name,
        filing_url: item.filing_url,
        submission_url: item.submission_url,
        primary_document: item.primary_document,
        metrics: item.metrics,
        analysis: existing?.analysis ?? null,
        created_at: existing?.created_at ?? now,
        updated_at: now
      })
      .onConflictDoUpdate({
        target: filing.accession_number,
        set: {
          ticker: item.ticker,
          filing_type: item.filing_type,
          filing_date: item.filing_date,
          cik: item.cik,
          company_name: item.company_name,
          filing_url: item.filing_url,
          submission_url: item.submission_url,
          primary_document: item.primary_document,
          metrics: item.metrics,
          updated_at: now
        }
      })
      .returning({ id: filing.id });
    // Links: dedupe by (type, url), then insert-or-ignore each one.
    const links = dedupeLinks(item.links);
    for (const link of links) {
      await db
        .insert(filingLink)
        .values({
          filing_id: saved.id,
          link_type: link.link_type,
          url: link.url,
          source: 'sec',
          created_at: now
        })
        .onConflictDoNothing();
    }
    if (existing) {
      updated += 1;
    } else {
      inserted += 1;
    }
  }
  return { inserted, updated };
}
/**
 * Attach an AI analysis blob to the filing with the given accession number.
 * Returns the updated filing, or null when no such filing exists.
 */
export async function saveFilingAnalysis(
  accessionNumber: string,
  analysis: Filing['analysis']
) {
  const [updated] = await db
    .update(filing)
    .set({
      analysis,
      updated_at: new Date().toISOString()
    })
    .where(eq(filing.accession_number, accessionNumber))
    .returning();
  return updated ? toFiling(updated) : null;
}

View File

@@ -0,0 +1,260 @@
import { and, eq } from 'drizzle-orm';
import type { Holding } from '@/lib/types';
import { recalculateHolding } from '@/lib/server/portfolio';
import { db } from '@/lib/server/db';
import { holding } from '@/lib/server/db/schema';
type HoldingRow = typeof holding.$inferSelect;
/** Project a raw `holding` row onto the public `Holding` shape (explicit pick). */
function toHolding(row: HoldingRow): Holding {
  const {
    id, user_id, ticker, shares, avg_cost, current_price,
    market_value, gain_loss, gain_loss_pct, last_price_at,
    created_at, updated_at
  } = row;
  return {
    id, user_id, ticker, shares, avg_cost, current_price,
    market_value, gain_loss, gain_loss_pct, last_price_at,
    created_at, updated_at
  };
}
/** Return a new array of holdings ordered by numeric market value, largest first. */
function sortByMarketValueDesc(rows: Holding[]) {
  const ordered = [...rows];
  ordered.sort((left, right) => Number(right.market_value) - Number(left.market_value));
  return ordered;
}
/** Uppercase the ticker and render numeric inputs as fixed 6-decimal strings. */
function normalizeHoldingInput(input: { ticker: string; shares: number; avgCost: number; currentPrice: number }) {
  const asDecimal = (value: number) => value.toFixed(6);
  return {
    ticker: input.ticker.trim().toUpperCase(),
    shares: asDecimal(input.shares),
    avg_cost: asDecimal(input.avgCost),
    current_price: asDecimal(input.currentPrice)
  };
}
/** All holdings for a user, ordered by market value (largest first). */
export async function listUserHoldings(userId: string) {
  const rows = await db
    .select()
    .from(holding)
    .where(eq(holding.user_id, userId));
  return sortByMarketValueDesc(rows.map(toHolding));
}
/**
 * Insert or update the holding for (userId, ticker).
 *
 * Numeric inputs are normalised to 6-decimal strings, derived columns
 * (market value, gain/loss) are recomputed via `recalculateHolding`, and the
 * price timestamp is refreshed. When `currentPrice` is omitted or not a
 * finite number, the average cost is used as the price.
 *
 * @returns The persisted `Holding`.
 */
export async function upsertHoldingRecord(input: {
  userId: string;
  ticker: string;
  shares: number;
  avgCost: number;
  currentPrice?: number;
}) {
  const ticker = input.ticker.trim().toUpperCase();
  const now = new Date().toISOString();
  const [existing] = await db
    .select()
    .from(holding)
    .where(and(eq(holding.user_id, input.userId), eq(holding.ticker, ticker)))
    .limit(1);
  // Fall back to avg cost when no finite market price was supplied.
  const currentPrice = Number.isFinite(input.currentPrice)
    ? Number(input.currentPrice)
    : input.avgCost;
  // Shared by the update and insert paths (previously duplicated per branch).
  const normalized = normalizeHoldingInput({
    ticker,
    shares: input.shares,
    avgCost: input.avgCost,
    currentPrice
  });
  if (existing) {
    const next = recalculateHolding({
      ...toHolding(existing),
      ...normalized,
      updated_at: now,
      last_price_at: now
    });
    const [updated] = await db
      .update(holding)
      .set({
        ticker: next.ticker,
        shares: next.shares,
        avg_cost: next.avg_cost,
        current_price: next.current_price,
        market_value: next.market_value,
        gain_loss: next.gain_loss,
        gain_loss_pct: next.gain_loss_pct,
        updated_at: next.updated_at,
        last_price_at: next.last_price_at
      })
      .where(eq(holding.id, existing.id))
      .returning();
    return toHolding(updated);
  }
  // New position: seed derived columns at zero and let recalculateHolding
  // fill them; id 0 is a placeholder the database replaces on insert.
  const createdBase: Holding = {
    id: 0,
    user_id: input.userId,
    ticker: normalized.ticker,
    shares: normalized.shares,
    avg_cost: normalized.avg_cost,
    current_price: normalized.current_price,
    market_value: '0',
    gain_loss: '0',
    gain_loss_pct: '0',
    last_price_at: now,
    created_at: now,
    updated_at: now
  };
  const created = recalculateHolding(createdBase);
  const [inserted] = await db
    .insert(holding)
    .values({
      user_id: created.user_id,
      ticker: created.ticker,
      shares: created.shares,
      avg_cost: created.avg_cost,
      current_price: created.current_price,
      market_value: created.market_value,
      gain_loss: created.gain_loss,
      gain_loss_pct: created.gain_loss_pct,
      last_price_at: created.last_price_at,
      created_at: created.created_at,
      updated_at: created.updated_at
    })
    .returning();
  return toHolding(inserted);
}
/**
 * Partially update a holding (shares / avg cost / current price) by id,
 * scoped to the owning user, recomputing the derived value columns.
 *
 * Missing or non-finite numeric inputs fall back to the stored values; a
 * missing current price falls back to the stored price, then avg cost.
 *
 * @returns The updated holding, or null when the row does not exist or
 *   belongs to another user.
 */
export async function updateHoldingByIdRecord(input: {
  userId: string;
  id: number;
  shares?: number;
  avgCost?: number;
  currentPrice?: number;
}) {
  const [existing] = await db
    .select()
    .from(holding)
    .where(and(eq(holding.id, input.id), eq(holding.user_id, input.userId)))
    .limit(1);
  if (!existing) {
    return null;
  }
  const current = toHolding(existing);
  const shares = Number.isFinite(input.shares)
    ? Number(input.shares)
    : Number(current.shares);
  const avgCost = Number.isFinite(input.avgCost)
    ? Number(input.avgCost)
    : Number(current.avg_cost);
  const currentPrice = Number.isFinite(input.currentPrice)
    ? Number(input.currentPrice)
    : Number(current.current_price ?? current.avg_cost);
  // One timestamp so updated_at and last_price_at agree exactly (previously
  // two separate new Date() calls could differ by a millisecond).
  const now = new Date().toISOString();
  const next = recalculateHolding({
    ...current,
    shares: shares.toFixed(6),
    avg_cost: avgCost.toFixed(6),
    current_price: currentPrice.toFixed(6),
    updated_at: now,
    last_price_at: now
  });
  const [updated] = await db
    .update(holding)
    .set({
      shares: next.shares,
      avg_cost: next.avg_cost,
      current_price: next.current_price,
      market_value: next.market_value,
      gain_loss: next.gain_loss,
      gain_loss_pct: next.gain_loss_pct,
      updated_at: next.updated_at,
      last_price_at: next.last_price_at
    })
    .where(eq(holding.id, existing.id))
    .returning();
  return toHolding(updated);
}
/** Delete a holding scoped to its owner; resolves true when a row was removed. */
export async function deleteHoldingByIdRecord(userId: string, id: number) {
  const rows = await db
    .delete(holding)
    .where(and(eq(holding.user_id, userId), eq(holding.id, id)))
    .returning({ id: holding.id });
  return rows.length > 0;
}
/** Unsorted holdings for a user, used as input to the price-refresh task. */
export async function listHoldingsForPriceRefresh(userId: string) {
  const rows = await db
    .select()
    .from(holding)
    .where(eq(holding.user_id, userId));
  return rows.map(toHolding);
}
/**
 * Write refreshed quotes into a user's holdings.
 *
 * Holdings whose ticker has no entry in `quotes` are left untouched. Each
 * matched row is recalculated (market value, gain/loss) and updated
 * individually with the shared `updateTime` timestamp.
 *
 * @returns Number of holdings that were updated.
 */
export async function applyRefreshedPrices(
  userId: string,
  quotes: Map<string, number>,
  updateTime: string
) {
  const rows = await db
    .select()
    .from(holding)
    .where(eq(holding.user_id, userId));
  let updatedCount = 0;
  for (const row of rows) {
    const quote = quotes.get(row.ticker);
    if (quote === undefined) {
      continue;
    }
    const next = recalculateHolding({
      ...toHolding(row),
      current_price: quote.toFixed(6),
      last_price_at: updateTime,
      updated_at: updateTime
    });
    await db
      .update(holding)
      .set({
        current_price: next.current_price,
        market_value: next.market_value,
        gain_loss: next.gain_loss,
        gain_loss_pct: next.gain_loss_pct,
        last_price_at: next.last_price_at,
        updated_at: next.updated_at
      })
      .where(eq(holding.id, row.id));
    updatedCount += 1;
  }
  return updatedCount;
}

View File

@@ -0,0 +1,48 @@
import { desc, eq } from 'drizzle-orm';
import type { PortfolioInsight } from '@/lib/types';
import { db } from '@/lib/server/db';
import { portfolioInsight } from '@/lib/server/db/schema';
type InsightRow = typeof portfolioInsight.$inferSelect;
/** Project a raw `portfolio_insight` row onto the public DTO (explicit pick). */
function toInsight(row: InsightRow): PortfolioInsight {
  const { id, user_id, provider, model, content, created_at } = row;
  return { id, user_id, provider, model, content, created_at };
}
/** Persist a generated portfolio insight and return the stored record. */
export async function createPortfolioInsight(input: {
  userId: string;
  provider: string;
  model: string;
  content: string;
}) {
  const [created] = await db
    .insert(portfolioInsight)
    .values({
      user_id: input.userId,
      provider: input.provider,
      model: input.model,
      content: input.content,
      created_at: new Date().toISOString()
    })
    .returning();
  return toInsight(created);
}
/** The most recently created insight for a user, or null when none exist. */
export async function getLatestPortfolioInsight(userId: string) {
  const [row] = await db
    .select()
    .from(portfolioInsight)
    .where(eq(portfolioInsight.user_id, userId))
    .orderBy(desc(portfolioInsight.created_at))
    .limit(1);
  return row ? toInsight(row) : null;
}

195
lib/server/repos/tasks.ts Normal file
View File

@@ -0,0 +1,195 @@
import { and, desc, eq, inArray, sql } from 'drizzle-orm';
import type { Task, TaskStatus, TaskType } from '@/lib/types';
import { db } from '@/lib/server/db';
import { taskRun } from '@/lib/server/db/schema';
type TaskRow = typeof taskRun.$inferSelect;
type CreateTaskInput = {
id: string;
user_id: string;
task_type: TaskType;
payload: Record<string, unknown>;
priority: number;
max_attempts: number;
};
/** Project a raw `task_run` row onto the public `Task` shape (explicit pick). */
function toTask(row: TaskRow): Task {
  const {
    id, user_id, task_type, status, priority, payload, result, error,
    attempts, max_attempts, workflow_run_id, created_at, updated_at, finished_at
  } = row;
  return {
    id, user_id, task_type, status, priority, payload, result, error,
    attempts, max_attempts, workflow_run_id, created_at, updated_at, finished_at
  };
}
/** Persist a new task in 'queued' state with zero attempts and no result. */
export async function createTaskRunRecord(input: CreateTaskInput) {
  const now = new Date().toISOString();
  const [row] = await db
    .insert(taskRun)
    .values({
      id: input.id,
      user_id: input.user_id,
      task_type: input.task_type,
      status: 'queued',
      priority: input.priority,
      payload: input.payload,
      result: null,
      error: null,
      attempts: 0,
      max_attempts: input.max_attempts,
      workflow_run_id: null,
      created_at: now,
      updated_at: now,
      finished_at: null
    })
    .returning();
  return toTask(row);
}
/** Link a task to the workflow run that is executing it. */
export async function setTaskWorkflowRunId(taskId: string, workflowRunId: string) {
  await db
    .update(taskRun)
    .set({
      workflow_run_id: workflowRunId,
      updated_at: new Date().toISOString()
    })
    .where(eq(taskRun.id, taskId));
}
/** Fetch a task by id, scoped to its owner; null when absent or not owned. */
export async function getTaskByIdForUser(taskId: string, userId: string) {
  const [row] = await db
    .select()
    .from(taskRun)
    .where(and(eq(taskRun.id, taskId), eq(taskRun.user_id, userId)))
    .limit(1);
  return row ? toTask(row) : null;
}
/**
 * Recent tasks for a user, newest first, optionally filtered by status.
 * `limit` is clamped to [1, 200] and defaults to 20.
 */
export async function listRecentTasksForUser(
  userId: string,
  limit = 20,
  statuses?: TaskStatus[]
) {
  const safeLimit = Math.min(Math.max(Math.trunc(limit), 1), 200);
  const rows = statuses && statuses.length > 0
    ? await db
      .select()
      .from(taskRun)
      .where(and(eq(taskRun.user_id, userId), inArray(taskRun.status, statuses)))
      .orderBy(desc(taskRun.created_at))
      .limit(safeLimit)
    : await db
      .select()
      .from(taskRun)
      .where(eq(taskRun.user_id, userId))
      .orderBy(desc(taskRun.created_at))
      .limit(safeLimit);
  return rows.map(toTask);
}
/** Count all tasks grouped by status, e.g. `{ queued: 2, completed: 10 }`. */
export async function countTasksByStatus() {
  const rows = await db
    .select({
      status: taskRun.status,
      count: sql<string>`count(*)`
    })
    .from(taskRun)
    .groupBy(taskRun.status);
  const queue: Record<string, number> = {};
  for (const row of rows) {
    // count(*) is typed as a string here; coerce to a number for callers.
    queue[row.status] = Number(row.count);
  }
  return queue;
}
/**
 * Transition a task from 'queued' to 'running' and bump its attempt counter
 * in a single statement. The status guard in the WHERE clause means a task
 * already claimed elsewhere matches no row, and the caller receives null.
 */
export async function claimQueuedTask(taskId: string) {
  const [row] = await db
    .update(taskRun)
    .set({
      status: 'running',
      attempts: sql`${taskRun.attempts} + 1`,
      updated_at: new Date().toISOString()
    })
    .where(and(eq(taskRun.id, taskId), eq(taskRun.status, 'queued')))
    .returning();
  return row ? toTask(row) : null;
}
/**
 * Mark a task completed, storing its result and clearing any prior error.
 *
 * A single timestamp is used so `updated_at` and `finished_at` agree exactly
 * (previously two separate `new Date()` calls could differ by a millisecond).
 *
 * @returns The updated task, or null when no row matched the id.
 */
export async function completeTask(taskId: string, result: Record<string, unknown>) {
  const finishedAt = new Date().toISOString();
  const [row] = await db
    .update(taskRun)
    .set({
      status: 'completed',
      result,
      error: null,
      updated_at: finishedAt,
      finished_at: finishedAt
    })
    .where(eq(taskRun.id, taskId))
    .returning();
  return row ? toTask(row) : null;
}
/**
 * Record a task failure: requeue it when attempts remain, otherwise mark it
 * failed and stamp `finished_at`.
 *
 * NOTE(review): the read-then-update is not atomic — two concurrent callers
 * could observe the same attempt count; confirm tasks run on a single worker.
 *
 * @returns The updated task (null when unknown) and whether it was requeued.
 */
export async function markTaskFailure(taskId: string, reason: string) {
  const [current] = await db
    .select()
    .from(taskRun)
    .where(eq(taskRun.id, taskId))
    .limit(1);
  if (!current) {
    return {
      task: null,
      shouldRetry: false
    };
  }
  // Retry while the attempt budget is not exhausted.
  const shouldRetry = current.attempts < current.max_attempts;
  const [updated] = await db
    .update(taskRun)
    .set({
      status: shouldRetry ? 'queued' : 'failed',
      error: reason,
      updated_at: new Date().toISOString(),
      finished_at: shouldRetry ? null : new Date().toISOString()
    })
    .where(eq(taskRun.id, taskId))
    .returning();
  return {
    task: updated ? toTask(updated) : null,
    shouldRetry
  };
}
/** Fetch a task by id without user scoping (worker-side lookup). */
export async function getTaskById(taskId: string) {
  const [row] = await db
    .select()
    .from(taskRun)
    .where(eq(taskRun.id, taskId))
    .limit(1);
  return row ? toTask(row) : null;
}

View File

@@ -0,0 +1,63 @@
import { and, desc, eq } from 'drizzle-orm';
import type { WatchlistItem } from '@/lib/types';
import { db } from '@/lib/server/db';
import { watchlistItem } from '@/lib/server/db/schema';
type WatchlistRow = typeof watchlistItem.$inferSelect;
/** Project a raw `watchlist_item` row onto the public DTO (explicit pick). */
function toWatchlistItem(row: WatchlistRow): WatchlistItem {
  const { id, user_id, ticker, company_name, sector, created_at } = row;
  return { id, user_id, ticker, company_name, sector, created_at };
}
/** Watchlist entries for a user, newest first. */
export async function listWatchlistItems(userId: string) {
  const rows = await db
    .select()
    .from(watchlistItem)
    .where(eq(watchlistItem.user_id, userId))
    .orderBy(desc(watchlistItem.created_at));
  return rows.map(toWatchlistItem);
}
/**
 * Insert a watchlist entry or, on a (user_id, ticker) conflict, refresh its
 * company name and sector. Blank or whitespace-only sectors are stored as
 * null. Returns the persisted item.
 */
export async function upsertWatchlistItemRecord(input: {
  userId: string;
  ticker: string;
  companyName: string;
  sector?: string;
}) {
  // Normalise once; previously trimmed and tested twice in both branches.
  const sector = input.sector?.trim() || null;
  const [row] = await db
    .insert(watchlistItem)
    .values({
      user_id: input.userId,
      ticker: input.ticker,
      company_name: input.companyName,
      sector,
      created_at: new Date().toISOString()
    })
    .onConflictDoUpdate({
      target: [watchlistItem.user_id, watchlistItem.ticker],
      set: {
        company_name: input.companyName,
        sector
      }
    })
    .returning();
  return toWatchlistItem(row);
}
/** Remove a watchlist entry scoped to its owner; true when a row was deleted. */
export async function deleteWatchlistItemRecord(userId: string, id: number) {
  const removed = await db
    .delete(watchlistItem)
    .where(and(eq(watchlistItem.user_id, userId), eq(watchlistItem.id, id)))
    .returning({ id: watchlistItem.id });
  return removed.length > 0;
}

View File

@@ -35,6 +35,8 @@ type SecFiling = {
filingDate: string;
accessionNumber: string;
filingUrl: string | null;
submissionUrl: string | null;
primaryDocument: string | null;
};
const SUPPORTED_FORMS: FilingType[] = ['10-K', '10-Q', '8-K'];
@@ -71,15 +73,17 @@ function fallbackFilings(ticker: string, limit: number): SecFiling[] {
const date = new Date(Date.now() - i * 1000 * 60 * 60 * 24 * 35).toISOString().slice(0, 10);
const accessionNumber = `${Date.now()}-${i}`;
filings.push({
ticker: normalized,
cik: String(100000 + i),
companyName,
filingType,
filingDate: date,
accessionNumber,
filingUrl: null
});
filings.push({
ticker: normalized,
cik: String(100000 + i),
companyName,
filingType,
filingDate: date,
accessionNumber,
filingUrl: null,
submissionUrl: null,
primaryDocument: null
});
}
return filings;
@@ -174,6 +178,7 @@ export async function fetchRecentFilings(ticker: string, limit = 20): Promise<Se
const cikPadded = company.cik.padStart(10, '0');
const payload = await fetchJson<RecentFilingsPayload>(`https://data.sec.gov/submissions/CIK${cikPadded}.json`);
const recent = payload.filings?.recent;
const submissionUrl = `https://data.sec.gov/submissions/CIK${cikPadded}.json`;
if (!recent) {
return fallbackFilings(company.ticker, safeLimit);
@@ -210,7 +215,9 @@ export async function fetchRecentFilings(ticker: string, limit = 20): Promise<Se
filingType,
filingDate: filingDates[i] ?? todayIso(),
accessionNumber,
filingUrl
filingUrl,
submissionUrl,
primaryDocument: documentName ?? null
});
if (filings.length >= safeLimit) {

View File

@@ -1,84 +0,0 @@
import { mkdir, readFile, rename, writeFile } from 'node:fs/promises';
import path from 'node:path';
import type { Filing, Holding, PortfolioInsight, Task, WatchlistItem } from '@/lib/types';
export type DataStore = {
counters: {
watchlist: number;
holdings: number;
filings: number;
insights: number;
};
watchlist: WatchlistItem[];
holdings: Holding[];
filings: Filing[];
tasks: Task[];
insights: PortfolioInsight[];
};
const DATA_DIR = path.join(process.cwd(), 'data');
const STORE_PATH = path.join(DATA_DIR, 'store.json');
let writeQueue = Promise.resolve();
/** Build an empty on-disk store: zeroed id counters and empty collections. */
function createDefaultStore(): DataStore {
  const zeroCounters = { watchlist: 0, holdings: 0, filings: 0, insights: 0 };
  return {
    counters: zeroCounters,
    watchlist: [],
    holdings: [],
    filings: [],
    tasks: [],
    insights: []
  };
}
// Create the data directory and seed an empty store file on first use.
async function ensureStoreFile() {
  await mkdir(DATA_DIR, { recursive: true });
  try {
    // Probe for an existing store; a failed read means the file is missing.
    await readFile(STORE_PATH, 'utf8');
  } catch {
    const defaultStore = createDefaultStore();
    // NOTE(review): insights.length is always 0 for a fresh store, so this
    // assignment is a no-op — possibly a leftover from seeded fixtures.
    defaultStore.counters.insights = defaultStore.insights.length;
    await writeFile(STORE_PATH, JSON.stringify(defaultStore, null, 2), 'utf8');
  }
}
// Load and parse the store file, creating it first if missing.
// NOTE(review): the parsed JSON is trusted as DataStore without validation.
async function readStore(): Promise<DataStore> {
  await ensureStoreFile();
  const raw = await readFile(STORE_PATH, 'utf8');
  return JSON.parse(raw) as DataStore;
}
// Persist the store atomically: write to a temp file, then rename over the
// target so readers never observe a partially written file.
async function writeStore(store: DataStore) {
  const tempPath = `${STORE_PATH}.tmp`;
  await writeFile(tempPath, JSON.stringify(store, null, 2), 'utf8');
  await rename(tempPath, STORE_PATH);
}
/** Deep-copy a store via a JSON round-trip (store contents are JSON-plain). */
function cloneStore(store: DataStore): DataStore {
  const serialized = JSON.stringify(store);
  return JSON.parse(serialized) as DataStore;
}
// Read-only deep copy of the current store state; mutations to the snapshot
// never reach disk.
export async function getStoreSnapshot() {
  const store = await readStore();
  return cloneStore(store);
}
// Run a mutation against the store with all read-modify-write cycles
// serialized through a promise queue, so concurrent callers cannot interleave.
export async function withStore<T>(mutator: (store: DataStore) => T | Promise<T>): Promise<T> {
  const run = async () => {
    const store = await readStore();
    const result = await mutator(store);
    // The (possibly mutated) store is written back unconditionally.
    await writeStore(store);
    return result;
  };
  // Chain after the previous operation regardless of its outcome.
  const nextRun = writeQueue.then(run, run);
  // Swallow this run's rejection in the queue tail so later calls still run;
  // the caller still sees the rejection via nextRun below.
  writeQueue = nextRun.then(() => undefined, () => undefined);
  return await nextRun;
}

View File

@@ -0,0 +1,218 @@
import type { Filing, Holding, Task } from '@/lib/types';
import { runOpenClawAnalysis } from '@/lib/server/openclaw';
import { buildPortfolioSummary } from '@/lib/server/portfolio';
import { getQuote } from '@/lib/server/prices';
import {
getFilingByAccession,
saveFilingAnalysis,
upsertFilingsRecords
} from '@/lib/server/repos/filings';
import {
applyRefreshedPrices,
listHoldingsForPriceRefresh,
listUserHoldings
} from '@/lib/server/repos/holdings';
import { createPortfolioInsight } from '@/lib/server/repos/insights';
import { fetchFilingMetrics, fetchRecentFilings } from '@/lib/server/sec';
/** Wrap non-record results as `{ value }` so task results are always plain objects. */
function toTaskResult(value: unknown): Record<string, unknown> {
  const isPlainRecord = typeof value === 'object' && value !== null && !Array.isArray(value);
  return isPlainRecord ? (value as Record<string, unknown>) : { value };
}
/** Validate and canonicalise a ticker payload value (trimmed, uppercased). */
function parseTicker(raw: unknown) {
  if (typeof raw === 'string') {
    const trimmed = raw.trim();
    if (trimmed.length > 0) {
      return trimmed.toUpperCase();
    }
  }
  throw new Error('Ticker is required');
}
/**
 * Coerce an arbitrary payload value to an integer clamped to [min, max];
 * non-numeric input yields `fallback`.
 */
function parseLimit(raw: unknown, fallback: number, min: number, max: number) {
  const parsed = typeof raw === 'number' ? raw : Number(raw);
  if (!Number.isFinite(parsed)) {
    return fallback;
  }
  return Math.min(Math.max(Math.trunc(parsed), min), max);
}
/** Collect the non-null SEC URLs for a filing as typed link records. */
function filingLinks(filing: {
  filingUrl: string | null;
  submissionUrl: string | null;
}) {
  const candidates: Array<[string, string | null]> = [
    ['primary_document', filing.filingUrl],
    ['submission_index', filing.submissionUrl]
  ];
  const links: Array<{ link_type: string; url: string }> = [];
  for (const [link_type, url] of candidates) {
    if (url) {
      links.push({ link_type, url });
    }
  }
  return links;
}
// Task: fetch recent SEC filings for the payload ticker, enrich each company
// (per CIK, fetched once) with metrics, and upsert everything into the
// filings tables. Returns fetch/insert/update counts for the task result.
async function processSyncFilings(task: Task) {
  const ticker = parseTicker(task.payload.ticker);
  const limit = parseLimit(task.payload.limit, 20, 1, 50);
  const filings = await fetchRecentFilings(ticker, limit);
  // Metrics are fetched once per CIK, not once per filing.
  const metricsByCik = new Map<string, Filing['metrics']>();
  for (const filing of filings) {
    if (!metricsByCik.has(filing.cik)) {
      const metrics = await fetchFilingMetrics(filing.cik, filing.ticker);
      metricsByCik.set(filing.cik, metrics);
    }
  }
  const saveResult = await upsertFilingsRecords(
    filings.map((filing) => ({
      ticker: filing.ticker,
      filing_type: filing.filingType,
      filing_date: filing.filingDate,
      accession_number: filing.accessionNumber,
      cik: filing.cik,
      company_name: filing.companyName,
      filing_url: filing.filingUrl,
      submission_url: filing.submissionUrl,
      primary_document: filing.primaryDocument,
      metrics: metricsByCik.get(filing.cik) ?? null,
      links: filingLinks(filing)
    }))
  );
  return {
    ticker,
    fetched: filings.length,
    inserted: saveResult.inserted,
    updated: saveResult.updated
  };
}
// Task: refresh quotes for every distinct ticker the user holds and write
// the recalculated prices back to the holdings table.
async function processRefreshPrices(task: Task) {
  const userId = task.user_id;
  if (!userId) {
    throw new Error('Task is missing user scope');
  }
  const userHoldings = await listHoldingsForPriceRefresh(userId);
  const tickers = [...new Set(userHoldings.map((entry) => entry.ticker))];
  // Quotes are fetched sequentially — presumably to respect provider rate
  // limits; confirm before parallelising with Promise.all.
  const quotes = new Map<string, number>();
  for (const ticker of tickers) {
    const quote = await getQuote(ticker);
    quotes.set(ticker, quote);
  }
  const updatedCount = await applyRefreshedPrices(userId, quotes, new Date().toISOString());
  return {
    updatedCount,
    totalTickers: tickers.length
  };
}
// Task: run the AI analysis prompt over a stored filing and persist the
// resulting text under the filing's accession number.
async function processAnalyzeFiling(task: Task) {
  const accessionNumber = typeof task.payload.accessionNumber === 'string'
    ? task.payload.accessionNumber
    : '';
  if (!accessionNumber) {
    throw new Error('accessionNumber is required');
  }
  const filing = await getFilingByAccession(accessionNumber);
  if (!filing) {
    throw new Error(`Filing ${accessionNumber} not found`);
  }
  // Prompt is assembled from stored filing columns only; metrics may be empty.
  const prompt = [
    'You are a fiscal research assistant focused on regulatory signals.',
    `Analyze this SEC filing from ${filing.company_name} (${filing.ticker}).`,
    `Form: ${filing.filing_type}`,
    `Filed: ${filing.filing_date}`,
    `Metrics: ${JSON.stringify(filing.metrics ?? {})}`,
    'Return concise sections: Thesis, Red Flags, Follow-up Questions, Portfolio Impact.'
  ].join('\n');
  const analysis = await runOpenClawAnalysis(prompt, 'Use concise institutional analyst language.');
  await saveFilingAnalysis(accessionNumber, {
    provider: analysis.provider,
    model: analysis.model,
    text: analysis.text
  });
  return {
    accessionNumber,
    provider: analysis.provider,
    model: analysis.model
  };
}
/** Reduce holdings to the camelCase fields included in the LLM prompt. */
function holdingDigest(holdings: Holding[]) {
  return holdings.map(({ ticker, shares, avg_cost, current_price, market_value, gain_loss, gain_loss_pct }) => ({
    ticker,
    shares,
    avgCost: avg_cost,
    currentPrice: current_price,
    marketValue: market_value,
    gainLoss: gain_loss,
    gainLossPct: gain_loss_pct
  }));
}
// Task: summarise the user's holdings, ask the AI for portfolio intelligence,
// and persist the generated insight for later retrieval.
async function processPortfolioInsights(task: Task) {
  const userId = task.user_id;
  if (!userId) {
    throw new Error('Task is missing user scope');
  }
  const userHoldings = await listUserHoldings(userId);
  const summary = buildPortfolioSummary(userHoldings);
  const prompt = [
    'Generate portfolio intelligence with actionable recommendations.',
    `Portfolio summary: ${JSON.stringify(summary)}`,
    `Holdings: ${JSON.stringify(holdingDigest(userHoldings))}`,
    'Respond with: 1) health score (0-100), 2) top 3 risks, 3) top 3 opportunities, 4) next actions in 7 days.'
  ].join('\n');
  const analysis = await runOpenClawAnalysis(prompt, 'Act as a risk-aware buy-side analyst.');
  await createPortfolioInsight({
    userId,
    provider: analysis.provider,
    model: analysis.model,
    content: analysis.text
  });
  return {
    provider: analysis.provider,
    model: analysis.model,
    summary
  };
}
/**
 * Dispatch a task to its processor by `task_type` and normalise the result
 * to a plain record. Throws for unknown task types so the caller records a
 * failure rather than silently completing.
 */
export async function runTaskProcessor(task: Task) {
  switch (task.task_type) {
    case 'sync_filings':
      return toTaskResult(await processSyncFilings(task));
    case 'refresh_prices':
      return toTaskResult(await processRefreshPrices(task));
    case 'analyze_filing':
      return toTaskResult(await processAnalyzeFiling(task));
    case 'portfolio_insights':
      return toTaskResult(await processPortfolioInsights(task));
    default:
      throw new Error(`Unsupported task type: ${task.task_type}`);
  }
}

View File

@@ -1,10 +1,15 @@
import { randomUUID } from 'node:crypto';
import type { Filing, Holding, PortfolioInsight, Task, TaskStatus, TaskType } from '@/lib/types';
import { runOpenClawAnalysis } from '@/lib/server/openclaw';
import { buildPortfolioSummary, recalculateHolding } from '@/lib/server/portfolio';
import { getQuote } from '@/lib/server/prices';
import { fetchFilingMetrics, fetchRecentFilings } from '@/lib/server/sec';
import { getStoreSnapshot, withStore } from '@/lib/server/store';
import { start } from 'workflow/api';
import type { Task, TaskStatus, TaskType } from '@/lib/types';
import { runTaskWorkflow } from '@/app/workflows/task-runner';
import {
countTasksByStatus,
createTaskRunRecord,
getTaskByIdForUser,
listRecentTasksForUser,
markTaskFailure,
setTaskWorkflowRunId
} from '@/lib/server/repos/tasks';
type EnqueueTaskInput = {
userId: string;
@@ -14,410 +19,41 @@ type EnqueueTaskInput = {
maxAttempts?: number;
};
const activeTaskRuns = new Set<string>();
function nowIso() {
return new Date().toISOString();
}
function toTaskResult(value: unknown): Record<string, unknown> {
if (!value || typeof value !== 'object' || Array.isArray(value)) {
return { value };
}
return value as Record<string, unknown>;
}
function parseTicker(raw: unknown) {
if (typeof raw !== 'string' || raw.trim().length < 1) {
throw new Error('Ticker is required');
}
return raw.trim().toUpperCase();
}
function parseLimit(raw: unknown, fallback: number, min: number, max: number) {
const numberValue = typeof raw === 'number' ? raw : Number(raw);
if (!Number.isFinite(numberValue)) {
return fallback;
}
const intValue = Math.trunc(numberValue);
return Math.min(Math.max(intValue, min), max);
}
function queueTaskRun(taskId: string, delayMs = 40) {
setTimeout(() => {
void processTask(taskId);
}, delayMs);
}
async function markTask(taskId: string, mutator: (task: Task) => void) {
await withStore((store) => {
const index = store.tasks.findIndex((task) => task.id === taskId);
if (index < 0) {
return;
}
const task = store.tasks[index];
mutator(task);
task.updated_at = nowIso();
});
}
async function processSyncFilings(task: Task) {
const ticker = parseTicker(task.payload.ticker);
const limit = parseLimit(task.payload.limit, 20, 1, 50);
const filings = await fetchRecentFilings(ticker, limit);
const metricsByCik = new Map<string, Filing['metrics']>();
for (const filing of filings) {
if (!metricsByCik.has(filing.cik)) {
const metrics = await fetchFilingMetrics(filing.cik, filing.ticker);
metricsByCik.set(filing.cik, metrics);
}
}
let insertedCount = 0;
let updatedCount = 0;
await withStore((store) => {
for (const filing of filings) {
const existingIndex = store.filings.findIndex((entry) => entry.accession_number === filing.accessionNumber);
const timestamp = nowIso();
const metrics = metricsByCik.get(filing.cik) ?? null;
if (existingIndex >= 0) {
const existing = store.filings[existingIndex];
store.filings[existingIndex] = {
...existing,
ticker: filing.ticker,
cik: filing.cik,
filing_type: filing.filingType,
filing_date: filing.filingDate,
company_name: filing.companyName,
filing_url: filing.filingUrl,
metrics,
updated_at: timestamp
};
updatedCount += 1;
} else {
store.counters.filings += 1;
store.filings.unshift({
id: store.counters.filings,
ticker: filing.ticker,
filing_type: filing.filingType,
filing_date: filing.filingDate,
accession_number: filing.accessionNumber,
cik: filing.cik,
company_name: filing.companyName,
filing_url: filing.filingUrl,
metrics,
analysis: null,
created_at: timestamp,
updated_at: timestamp
});
insertedCount += 1;
}
}
store.filings.sort((a, b) => {
const byDate = Date.parse(b.filing_date) - Date.parse(a.filing_date);
return Number.isFinite(byDate) && byDate !== 0
? byDate
: Date.parse(b.updated_at) - Date.parse(a.updated_at);
});
});
return {
ticker,
fetched: filings.length,
inserted: insertedCount,
updated: updatedCount
};
}
async function processRefreshPrices(task: Task) {
const userId = task.user_id;
if (!userId) {
throw new Error('Task is missing user scope');
}
const snapshot = await getStoreSnapshot();
const userHoldings = snapshot.holdings.filter((holding) => holding.user_id === userId);
const tickers = [...new Set(userHoldings.map((holding) => holding.ticker))];
const quotes = new Map<string, number>();
for (const ticker of tickers) {
const quote = await getQuote(ticker);
quotes.set(ticker, quote);
}
let updatedCount = 0;
const updateTime = nowIso();
await withStore((store) => {
store.holdings = store.holdings.map((holding) => {
if (holding.user_id !== userId) {
return holding;
}
const quote = quotes.get(holding.ticker);
if (quote === undefined) {
return holding;
}
updatedCount += 1;
return recalculateHolding({
...holding,
current_price: quote.toFixed(6),
last_price_at: updateTime,
updated_at: updateTime
});
});
});
return {
updatedCount,
totalTickers: tickers.length
};
}
async function processAnalyzeFiling(task: Task) {
const accessionNumber = typeof task.payload.accessionNumber === 'string'
? task.payload.accessionNumber
: '';
if (!accessionNumber) {
throw new Error('accessionNumber is required');
}
const snapshot = await getStoreSnapshot();
const filing = snapshot.filings.find((entry) => entry.accession_number === accessionNumber);
if (!filing) {
throw new Error(`Filing ${accessionNumber} not found`);
}
const prompt = [
'You are a fiscal research assistant focused on regulatory signals.',
`Analyze this SEC filing from ${filing.company_name} (${filing.ticker}).`,
`Form: ${filing.filing_type}`,
`Filed: ${filing.filing_date}`,
`Metrics: ${JSON.stringify(filing.metrics ?? {})}`,
'Return concise sections: Thesis, Red Flags, Follow-up Questions, Portfolio Impact.'
].join('\n');
const analysis = await runOpenClawAnalysis(prompt, 'Use concise institutional analyst language.');
await withStore((store) => {
const index = store.filings.findIndex((entry) => entry.accession_number === accessionNumber);
if (index < 0) {
return;
}
store.filings[index] = {
...store.filings[index],
analysis: {
provider: analysis.provider,
model: analysis.model,
text: analysis.text
},
updated_at: nowIso()
};
});
return {
accessionNumber,
provider: analysis.provider,
model: analysis.model
};
}
function holdingDigest(holdings: Holding[]) {
return holdings.map((holding) => ({
ticker: holding.ticker,
shares: holding.shares,
avgCost: holding.avg_cost,
currentPrice: holding.current_price,
marketValue: holding.market_value,
gainLoss: holding.gain_loss,
gainLossPct: holding.gain_loss_pct
}));
}
async function processPortfolioInsights(task: Task) {
const userId = task.user_id;
if (!userId) {
throw new Error('Task is missing user scope');
}
const snapshot = await getStoreSnapshot();
const userHoldings = snapshot.holdings.filter((holding) => holding.user_id === userId);
const summary = buildPortfolioSummary(userHoldings);
const prompt = [
'Generate portfolio intelligence with actionable recommendations.',
`Portfolio summary: ${JSON.stringify(summary)}`,
`Holdings: ${JSON.stringify(holdingDigest(userHoldings))}`,
'Respond with: 1) health score (0-100), 2) top 3 risks, 3) top 3 opportunities, 4) next actions in 7 days.'
].join('\n');
const analysis = await runOpenClawAnalysis(prompt, 'Act as a risk-aware buy-side analyst.');
const createdAt = nowIso();
await withStore((store) => {
store.counters.insights += 1;
const insight: PortfolioInsight = {
id: store.counters.insights,
user_id: userId,
provider: analysis.provider,
model: analysis.model,
content: analysis.text,
created_at: createdAt
};
store.insights.unshift(insight);
});
return {
provider: analysis.provider,
model: analysis.model,
summary
};
}
/**
 * Routes a task to its type-specific processor and returns that
 * processor's raw result.
 * @throws Error for task types with no registered processor.
 */
async function runTaskProcessor(task: Task) {
  if (task.task_type === 'sync_filings') {
    return await processSyncFilings(task);
  }
  if (task.task_type === 'refresh_prices') {
    return await processRefreshPrices(task);
  }
  if (task.task_type === 'analyze_filing') {
    return await processAnalyzeFiling(task);
  }
  if (task.task_type === 'portfolio_insights') {
    return await processPortfolioInsights(task);
  }
  throw new Error(`Unsupported task type: ${task.task_type}`);
}
/**
 * Claims and executes a single queued task by id.
 *
 * Concurrency guard: `activeTaskRuns` allows at most one in-process run per
 * task id; re-entrant calls return immediately. The claim itself happens
 * inside `withStore`, flipping status 'queued' -> 'running' together with
 * the attempt increment, so a task that is already running (or finished)
 * is skipped rather than run twice.
 *
 * On processor failure the task is re-queued with a delay while attempts
 * remain; otherwise it is marked 'failed'. Success stores the processor's
 * result and clears any previous error.
 */
async function processTask(taskId: string) {
  // Drop re-entrant calls: one in-flight run per task id in this process.
  if (activeTaskRuns.has(taskId)) {
    return;
  }
  activeTaskRuns.add(taskId);
  try {
    // Claim the task: only a 'queued' task transitions to 'running'.
    const task = await withStore((store) => {
      const index = store.tasks.findIndex((entry) => entry.id === taskId);
      if (index < 0) {
        return null;
      }
      const target = store.tasks[index];
      if (target.status !== 'queued') {
        return null;
      }
      target.status = 'running';
      target.attempts += 1;
      target.updated_at = nowIso();
      // Return a shallow snapshot so later reads are not affected by
      // subsequent store mutations.
      return { ...target };
    });
    if (!task) {
      return;
    }
    try {
      const result = toTaskResult(await runTaskProcessor(task));
      await markTask(taskId, (target) => {
        target.status = 'completed';
        target.result = result;
        target.error = null;
        target.finished_at = nowIso();
      });
    } catch (error) {
      const reason = error instanceof Error ? error.message : 'Task failed unexpectedly';
      // `attempts` was already incremented during the claim above.
      const shouldRetry = task.attempts < task.max_attempts;
      if (shouldRetry) {
        await markTask(taskId, (target) => {
          target.status = 'queued';
          target.error = reason;
        });
        // Re-run after a 1.2s back-off.
        queueTaskRun(taskId, 1200);
      } else {
        await markTask(taskId, (target) => {
          target.status = 'failed';
          target.error = reason;
          target.finished_at = nowIso();
        });
      }
    }
  } finally {
    activeTaskRuns.delete(taskId);
  }
}
export async function enqueueTask(input: EnqueueTaskInput) {
const createdAt = nowIso();
const task: Task = {
const task = await createTaskRunRecord({
id: randomUUID(),
user_id: input.userId,
task_type: input.taskType,
status: 'queued',
priority: input.priority ?? 50,
payload: input.payload ?? {},
result: null,
error: null,
attempts: 0,
max_attempts: input.maxAttempts ?? 3,
created_at: createdAt,
updated_at: createdAt,
finished_at: null
};
await withStore((store) => {
store.tasks.unshift(task);
store.tasks.sort((a, b) => {
if (a.priority !== b.priority) {
return b.priority - a.priority;
}
return Date.parse(b.created_at) - Date.parse(a.created_at);
});
priority: input.priority ?? 50,
max_attempts: input.maxAttempts ?? 3
});
queueTaskRun(task.id);
return task;
try {
const run = await start(runTaskWorkflow, [task.id]);
await setTaskWorkflowRunId(task.id, run.runId);
return {
...task,
workflow_run_id: run.runId
} satisfies Task;
} catch (error) {
const reason = error instanceof Error
? error.message
: 'Failed to start workflow';
await markTaskFailure(task.id, reason);
throw error;
}
}
/**
 * Fetches a task by id scoped to its owning user.
 *
 * Fix: the span contained the old snapshot-scanning body followed by an
 * unreachable delegation (merged diff residue); only the delegation —
 * consistent with the record-helper refactor — is kept.
 *
 * @returns the task, or null when it does not exist or is owned by another user.
 */
export async function getTaskById(taskId: string, userId: string) {
  return await getTaskByIdForUser(taskId, userId);
}
/**
 * Lists a user's most recent tasks, optionally filtered by status.
 *
 * Fix: the span contained the old snapshot-filtering body followed by an
 * unreachable delegation (merged diff residue); only the delegation is kept.
 * NOTE(review): the old body clamped `limit` to [1, 200] — assumed
 * `listRecentTasksForUser` now performs that clamping; confirm.
 *
 * @param limit desired page size (default 20).
 * @param statuses optional status filter; omitted/empty means all statuses.
 */
export async function listRecentTasks(userId: string, limit = 20, statuses?: TaskStatus[]) {
  return await listRecentTasksForUser(userId, limit, statuses);
}
/**
 * Queue health overview: task counts grouped by status.
 */
export async function getTaskQueueSnapshot() {
  const counts = await countTasksByStatus();
  return counts;
}

View File

@@ -46,6 +46,8 @@ export type Filing = {
cik: string;
company_name: string;
filing_url: string | null;
submission_url?: string | null;
primary_document?: string | null;
metrics: {
revenue: number | null;
netIncome: number | null;
@@ -77,6 +79,7 @@ export type Task = {
error: string | null;
attempts: number;
max_attempts: number;
workflow_run_id?: string | null;
created_at: string;
updated_at: string;
finished_at: string | null;