chore: commit all changes
This commit is contained in:
@@ -22,3 +22,9 @@ OPENCLAW_MODEL=zeroclaw
|
|||||||
|
|
||||||
# SEC API etiquette
|
# SEC API etiquette
|
||||||
SEC_USER_AGENT=Fiscal Clone <support@fiscal.local>
|
SEC_USER_AGENT=Fiscal Clone <support@fiscal.local>
|
||||||
|
|
||||||
|
# Workflow runtime (Postgres world)
|
||||||
|
WORKFLOW_TARGET_WORLD=@workflow/world-postgres
|
||||||
|
WORKFLOW_POSTGRES_URL=postgres://postgres:postgres@localhost:5432/fiscal_clone
|
||||||
|
WORKFLOW_POSTGRES_JOB_PREFIX=fiscal_clone
|
||||||
|
WORKFLOW_POSTGRES_WORKER_CONCURRENCY=10
|
||||||
|
|||||||
17
README.md
17
README.md
@@ -10,8 +10,10 @@ Turbopack-first rebuild of a fiscal.ai-style terminal with OpenClaw integration.
|
|||||||
- Turbopack for `dev` and `build`
|
- Turbopack for `dev` and `build`
|
||||||
- Better Auth (email/password + magic link)
|
- Better Auth (email/password + magic link)
|
||||||
- Drizzle ORM (PostgreSQL) + Better Auth Drizzle adapter
|
- Drizzle ORM (PostgreSQL) + Better Auth Drizzle adapter
|
||||||
- Internal API routes via Elysia catch-all (`app/api/[[...slugs]]/route.ts`)
|
- Internal API routes via Elysia app module (`lib/server/api/app.ts`)
|
||||||
- Durable local task engine and JSON data store
|
- Eden Treaty for type-safe frontend API calls
|
||||||
|
- Workflow DevKit + Postgres World for durable background task execution
|
||||||
|
- PostgreSQL-backed domain storage (watchlist, holdings, filings, tasks, insights)
|
||||||
- OpenClaw/ZeroClaw analysis via OpenAI-compatible chat endpoint
|
- OpenClaw/ZeroClaw analysis via OpenAI-compatible chat endpoint
|
||||||
|
|
||||||
## Run locally
|
## Run locally
|
||||||
@@ -24,11 +26,12 @@ bun run dev
|
|||||||
Open [http://localhost:3000](http://localhost:3000).
|
Open [http://localhost:3000](http://localhost:3000).
|
||||||
|
|
||||||
Better Auth requires PostgreSQL. Set `DATABASE_URL`, `BETTER_AUTH_SECRET`, and `BETTER_AUTH_BASE_URL` in `.env.local`.
|
Better Auth requires PostgreSQL. Set `DATABASE_URL`, `BETTER_AUTH_SECRET`, and `BETTER_AUTH_BASE_URL` in `.env.local`.
|
||||||
Generate and apply auth schema migrations before starting the app:
|
Generate/apply schema migrations and set up the workflow world tables before starting the app:
|
||||||
|
|
||||||
```bash
|
```bash
|
||||||
bun run db:generate
|
bun run db:generate
|
||||||
bun run db:migrate
|
bun run db:migrate
|
||||||
|
bun run workflow:setup
|
||||||
```
|
```
|
||||||
|
|
||||||
## Production build
|
## Production build
|
||||||
@@ -51,7 +54,6 @@ The base Docker Compose now includes an internal PostgreSQL service (`postgres`)
|
|||||||
On container startup, the app now applies Drizzle migrations automatically before launching Next.js.
|
On container startup, the app now applies Drizzle migrations automatically before launching Next.js.
|
||||||
For Coolify/remote Docker Compose, only app container port `3000` is exposed internally (no fixed host port bind), avoiding host port collisions.
|
For Coolify/remote Docker Compose, only app container port `3000` is exposed internally (no fixed host port bind), avoiding host port collisions.
|
||||||
If you use an external Postgres instance, set `DATABASE_URL` explicitly.
|
If you use an external Postgres instance, set `DATABASE_URL` explicitly.
|
||||||
Runtime data persists in the `app_data` volume (`/app/data` in container).
|
|
||||||
Docker images use Bun (`oven/bun:1.3.5-alpine`) for build and runtime.
|
Docker images use Bun (`oven/bun:1.3.5-alpine`) for build and runtime.
|
||||||
|
|
||||||
## Environment
|
## Environment
|
||||||
@@ -73,13 +75,18 @@ OPENCLAW_BASE_URL=http://localhost:4000
|
|||||||
OPENCLAW_API_KEY=your_key
|
OPENCLAW_API_KEY=your_key
|
||||||
OPENCLAW_MODEL=zeroclaw
|
OPENCLAW_MODEL=zeroclaw
|
||||||
SEC_USER_AGENT=Fiscal Clone <support@fiscal.local>
|
SEC_USER_AGENT=Fiscal Clone <support@fiscal.local>
|
||||||
|
|
||||||
|
WORKFLOW_TARGET_WORLD=@workflow/world-postgres
|
||||||
|
WORKFLOW_POSTGRES_URL=postgres://postgres:postgres@localhost:5432/fiscal_clone
|
||||||
|
WORKFLOW_POSTGRES_JOB_PREFIX=fiscal_clone
|
||||||
|
WORKFLOW_POSTGRES_WORKER_CONCURRENCY=10
|
||||||
```
|
```
|
||||||
|
|
||||||
If OpenClaw is unset, the app uses local fallback analysis so task workflows still run.
|
If OpenClaw is unset, the app uses local fallback analysis so task workflows still run.
|
||||||
|
|
||||||
## API surface
|
## API surface
|
||||||
|
|
||||||
All endpoints below are handled by Elysia in `app/api/[[...slugs]]/route.ts`.
|
All endpoints below are defined in Elysia at `lib/server/api/app.ts` and exposed via `app/api/[[...slugs]]/route.ts`.
|
||||||
|
|
||||||
- `ALL /api/auth/*` (Better Auth handler)
|
- `ALL /api/auth/*` (Better Auth handler)
|
||||||
- `GET /api/health`
|
- `GET /api/health`
|
||||||
|
|||||||
@@ -1,512 +1,4 @@
|
|||||||
import { Elysia } from 'elysia';
|
import { app } from '@/lib/server/api/app';
|
||||||
import type { Holding, TaskStatus, WatchlistItem } from '@/lib/types';
|
|
||||||
import { auth } from '@/lib/auth';
|
|
||||||
import { requireAuthenticatedSession } from '@/lib/server/auth-session';
|
|
||||||
import { asErrorMessage, jsonError } from '@/lib/server/http';
|
|
||||||
import { buildPortfolioSummary, recalculateHolding } from '@/lib/server/portfolio';
|
|
||||||
import { getStoreSnapshot, withStore } from '@/lib/server/store';
|
|
||||||
import { enqueueTask, getTaskById, listRecentTasks } from '@/lib/server/tasks';
|
|
||||||
|
|
||||||
const ALLOWED_STATUSES: TaskStatus[] = ['queued', 'running', 'completed', 'failed'];
|
|
||||||
|
|
||||||
function nowIso() {
|
|
||||||
return new Date().toISOString();
|
|
||||||
}
|
|
||||||
|
|
||||||
function asRecord(value: unknown): Record<string, unknown> {
|
|
||||||
if (!value || typeof value !== 'object' || Array.isArray(value)) {
|
|
||||||
return {};
|
|
||||||
}
|
|
||||||
|
|
||||||
return value as Record<string, unknown>;
|
|
||||||
}
|
|
||||||
|
|
||||||
function asPositiveNumber(value: unknown) {
|
|
||||||
const parsed = typeof value === 'number' ? value : Number(value);
|
|
||||||
return Number.isFinite(parsed) && parsed > 0 ? parsed : null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const authHandler = ({ request }: { request: Request }) => auth.handler(request);
|
|
||||||
|
|
||||||
export const app = new Elysia({ prefix: '/api' })
|
|
||||||
.all('/auth', authHandler)
|
|
||||||
.all('/auth/*', authHandler)
|
|
||||||
.get('/health', async () => {
|
|
||||||
const snapshot = await getStoreSnapshot();
|
|
||||||
const queue = snapshot.tasks.reduce<Record<string, number>>((acc, task) => {
|
|
||||||
acc[task.status] = (acc[task.status] ?? 0) + 1;
|
|
||||||
return acc;
|
|
||||||
}, {});
|
|
||||||
|
|
||||||
return Response.json({
|
|
||||||
status: 'ok',
|
|
||||||
version: '3.0.0',
|
|
||||||
timestamp: new Date().toISOString(),
|
|
||||||
queue
|
|
||||||
});
|
|
||||||
})
|
|
||||||
.get('/me', async () => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
return Response.json({
|
|
||||||
user: {
|
|
||||||
id: session.user.id,
|
|
||||||
email: session.user.email,
|
|
||||||
name: session.user.name,
|
|
||||||
image: session.user.image
|
|
||||||
}
|
|
||||||
});
|
|
||||||
})
|
|
||||||
.get('/watchlist', async () => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const snapshot = await getStoreSnapshot();
|
|
||||||
const items = snapshot.watchlist
|
|
||||||
.filter((item) => item.user_id === session.user.id)
|
|
||||||
.slice()
|
|
||||||
.sort((a, b) => Date.parse(b.created_at) - Date.parse(a.created_at));
|
|
||||||
|
|
||||||
return Response.json({ items });
|
|
||||||
})
|
|
||||||
.post('/watchlist', async ({ body }) => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const payload = asRecord(body);
|
|
||||||
const ticker = typeof payload.ticker === 'string' ? payload.ticker.trim().toUpperCase() : '';
|
|
||||||
const companyName = typeof payload.companyName === 'string' ? payload.companyName.trim() : '';
|
|
||||||
const sector = typeof payload.sector === 'string' ? payload.sector.trim() : '';
|
|
||||||
|
|
||||||
if (!ticker) {
|
|
||||||
return jsonError('ticker is required');
|
|
||||||
}
|
|
||||||
|
|
||||||
if (!companyName) {
|
|
||||||
return jsonError('companyName is required');
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
let item: WatchlistItem | null = null;
|
|
||||||
|
|
||||||
await withStore((store) => {
|
|
||||||
const existingIndex = store.watchlist.findIndex((entry) => {
|
|
||||||
return entry.user_id === session.user.id && entry.ticker === ticker;
|
|
||||||
});
|
|
||||||
|
|
||||||
if (existingIndex >= 0) {
|
|
||||||
const existing = store.watchlist[existingIndex];
|
|
||||||
const updated: WatchlistItem = {
|
|
||||||
...existing,
|
|
||||||
company_name: companyName,
|
|
||||||
sector: sector || null
|
|
||||||
};
|
|
||||||
|
|
||||||
store.watchlist[existingIndex] = updated;
|
|
||||||
item = updated;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
store.counters.watchlist += 1;
|
|
||||||
const created: WatchlistItem = {
|
|
||||||
id: store.counters.watchlist,
|
|
||||||
user_id: session.user.id,
|
|
||||||
ticker,
|
|
||||||
company_name: companyName,
|
|
||||||
sector: sector || null,
|
|
||||||
created_at: nowIso()
|
|
||||||
};
|
|
||||||
|
|
||||||
store.watchlist.unshift(created);
|
|
||||||
item = created;
|
|
||||||
});
|
|
||||||
|
|
||||||
return Response.json({ item });
|
|
||||||
} catch (error) {
|
|
||||||
return jsonError(asErrorMessage(error, 'Failed to create watchlist item'));
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.delete('/watchlist/:id', async ({ params }) => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const numericId = Number(params.id);
|
|
||||||
if (!Number.isInteger(numericId) || numericId <= 0) {
|
|
||||||
return jsonError('Invalid watchlist id', 400);
|
|
||||||
}
|
|
||||||
|
|
||||||
let removed = false;
|
|
||||||
|
|
||||||
await withStore((store) => {
|
|
||||||
const next = store.watchlist.filter((item) => !(item.id === numericId && item.user_id === session.user.id));
|
|
||||||
removed = next.length !== store.watchlist.length;
|
|
||||||
store.watchlist = next;
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!removed) {
|
|
||||||
return jsonError('Watchlist item not found', 404);
|
|
||||||
}
|
|
||||||
|
|
||||||
return Response.json({ success: true });
|
|
||||||
})
|
|
||||||
.get('/portfolio/holdings', async () => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const snapshot = await getStoreSnapshot();
|
|
||||||
const holdings = snapshot.holdings
|
|
||||||
.filter((holding) => holding.user_id === session.user.id)
|
|
||||||
.slice()
|
|
||||||
.sort((a, b) => Number(b.market_value) - Number(a.market_value));
|
|
||||||
|
|
||||||
return Response.json({ holdings });
|
|
||||||
})
|
|
||||||
.post('/portfolio/holdings', async ({ body }) => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const payload = asRecord(body);
|
|
||||||
const ticker = typeof payload.ticker === 'string' ? payload.ticker.trim().toUpperCase() : '';
|
|
||||||
const shares = asPositiveNumber(payload.shares);
|
|
||||||
const avgCost = asPositiveNumber(payload.avgCost);
|
|
||||||
|
|
||||||
if (!ticker) {
|
|
||||||
return jsonError('ticker is required');
|
|
||||||
}
|
|
||||||
|
|
||||||
if (shares === null) {
|
|
||||||
return jsonError('shares must be a positive number');
|
|
||||||
}
|
|
||||||
|
|
||||||
if (avgCost === null) {
|
|
||||||
return jsonError('avgCost must be a positive number');
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const now = nowIso();
|
|
||||||
let holding: Holding | null = null;
|
|
||||||
|
|
||||||
await withStore((store) => {
|
|
||||||
const existingIndex = store.holdings.findIndex((entry) => {
|
|
||||||
return entry.user_id === session.user.id && entry.ticker === ticker;
|
|
||||||
});
|
|
||||||
|
|
||||||
const currentPrice = asPositiveNumber(payload.currentPrice) ?? avgCost;
|
|
||||||
|
|
||||||
if (existingIndex >= 0) {
|
|
||||||
const existing = store.holdings[existingIndex];
|
|
||||||
const updated = recalculateHolding({
|
|
||||||
...existing,
|
|
||||||
ticker,
|
|
||||||
shares: shares.toFixed(6),
|
|
||||||
avg_cost: avgCost.toFixed(6),
|
|
||||||
current_price: currentPrice.toFixed(6),
|
|
||||||
updated_at: now,
|
|
||||||
last_price_at: now
|
|
||||||
});
|
|
||||||
|
|
||||||
store.holdings[existingIndex] = updated;
|
|
||||||
holding = updated;
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
store.counters.holdings += 1;
|
|
||||||
const created = recalculateHolding({
|
|
||||||
id: store.counters.holdings,
|
|
||||||
user_id: session.user.id,
|
|
||||||
ticker,
|
|
||||||
shares: shares.toFixed(6),
|
|
||||||
avg_cost: avgCost.toFixed(6),
|
|
||||||
current_price: currentPrice.toFixed(6),
|
|
||||||
market_value: '0',
|
|
||||||
gain_loss: '0',
|
|
||||||
gain_loss_pct: '0',
|
|
||||||
last_price_at: now,
|
|
||||||
created_at: now,
|
|
||||||
updated_at: now
|
|
||||||
});
|
|
||||||
|
|
||||||
store.holdings.unshift(created);
|
|
||||||
holding = created;
|
|
||||||
});
|
|
||||||
|
|
||||||
return Response.json({ holding });
|
|
||||||
} catch (error) {
|
|
||||||
return jsonError(asErrorMessage(error, 'Failed to save holding'));
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.patch('/portfolio/holdings/:id', async ({ params, body }) => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const numericId = Number(params.id);
|
|
||||||
if (!Number.isInteger(numericId) || numericId <= 0) {
|
|
||||||
return jsonError('Invalid holding id');
|
|
||||||
}
|
|
||||||
|
|
||||||
const payload = asRecord(body);
|
|
||||||
let found = false;
|
|
||||||
let updated: Holding | null = null;
|
|
||||||
|
|
||||||
await withStore((store) => {
|
|
||||||
const index = store.holdings.findIndex((entry) => {
|
|
||||||
return entry.id === numericId && entry.user_id === session.user.id;
|
|
||||||
});
|
|
||||||
|
|
||||||
if (index < 0) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
found = true;
|
|
||||||
const existing = store.holdings[index];
|
|
||||||
|
|
||||||
const shares = asPositiveNumber(payload.shares) ?? Number(existing.shares);
|
|
||||||
const avgCost = asPositiveNumber(payload.avgCost) ?? Number(existing.avg_cost);
|
|
||||||
const currentPrice = asPositiveNumber(payload.currentPrice) ?? Number(existing.current_price ?? existing.avg_cost);
|
|
||||||
|
|
||||||
const next = recalculateHolding({
|
|
||||||
...existing,
|
|
||||||
shares: shares.toFixed(6),
|
|
||||||
avg_cost: avgCost.toFixed(6),
|
|
||||||
current_price: currentPrice.toFixed(6),
|
|
||||||
updated_at: nowIso(),
|
|
||||||
last_price_at: nowIso()
|
|
||||||
});
|
|
||||||
|
|
||||||
store.holdings[index] = next;
|
|
||||||
updated = next;
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!found) {
|
|
||||||
return jsonError('Holding not found', 404);
|
|
||||||
}
|
|
||||||
|
|
||||||
return Response.json({ holding: updated });
|
|
||||||
})
|
|
||||||
.delete('/portfolio/holdings/:id', async ({ params }) => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const numericId = Number(params.id);
|
|
||||||
if (!Number.isInteger(numericId) || numericId <= 0) {
|
|
||||||
return jsonError('Invalid holding id');
|
|
||||||
}
|
|
||||||
|
|
||||||
let removed = false;
|
|
||||||
|
|
||||||
await withStore((store) => {
|
|
||||||
const next = store.holdings.filter((holding) => {
|
|
||||||
return !(holding.id === numericId && holding.user_id === session.user.id);
|
|
||||||
});
|
|
||||||
removed = next.length !== store.holdings.length;
|
|
||||||
store.holdings = next;
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!removed) {
|
|
||||||
return jsonError('Holding not found', 404);
|
|
||||||
}
|
|
||||||
|
|
||||||
return Response.json({ success: true });
|
|
||||||
})
|
|
||||||
.get('/portfolio/summary', async () => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const snapshot = await getStoreSnapshot();
|
|
||||||
const summary = buildPortfolioSummary(
|
|
||||||
snapshot.holdings.filter((holding) => holding.user_id === session.user.id)
|
|
||||||
);
|
|
||||||
|
|
||||||
return Response.json({ summary });
|
|
||||||
})
|
|
||||||
.post('/portfolio/refresh-prices', async () => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const task = await enqueueTask({
|
|
||||||
userId: session.user.id,
|
|
||||||
taskType: 'refresh_prices',
|
|
||||||
payload: {},
|
|
||||||
priority: 80
|
|
||||||
});
|
|
||||||
|
|
||||||
return Response.json({ task });
|
|
||||||
} catch (error) {
|
|
||||||
return jsonError(asErrorMessage(error, 'Failed to queue refresh task'));
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.post('/portfolio/insights/generate', async () => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const task = await enqueueTask({
|
|
||||||
userId: session.user.id,
|
|
||||||
taskType: 'portfolio_insights',
|
|
||||||
payload: {},
|
|
||||||
priority: 70
|
|
||||||
});
|
|
||||||
|
|
||||||
return Response.json({ task });
|
|
||||||
} catch (error) {
|
|
||||||
return jsonError(asErrorMessage(error, 'Failed to queue insights task'));
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.get('/portfolio/insights/latest', async () => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const snapshot = await getStoreSnapshot();
|
|
||||||
const insight = snapshot.insights
|
|
||||||
.filter((entry) => entry.user_id === session.user.id)
|
|
||||||
.slice()
|
|
||||||
.sort((a, b) => Date.parse(b.created_at) - Date.parse(a.created_at))[0] ?? null;
|
|
||||||
|
|
||||||
return Response.json({ insight });
|
|
||||||
})
|
|
||||||
.get('/filings', async ({ request }) => {
|
|
||||||
const { response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = new URL(request.url);
|
|
||||||
const tickerFilter = url.searchParams.get('ticker')?.trim().toUpperCase();
|
|
||||||
const limitValue = Number(url.searchParams.get('limit') ?? 50);
|
|
||||||
const limit = Number.isFinite(limitValue)
|
|
||||||
? Math.min(Math.max(Math.trunc(limitValue), 1), 250)
|
|
||||||
: 50;
|
|
||||||
|
|
||||||
const snapshot = await getStoreSnapshot();
|
|
||||||
const filtered = tickerFilter
|
|
||||||
? snapshot.filings.filter((filing) => filing.ticker === tickerFilter)
|
|
||||||
: snapshot.filings;
|
|
||||||
|
|
||||||
const filings = filtered
|
|
||||||
.slice()
|
|
||||||
.sort((a, b) => Date.parse(b.filing_date) - Date.parse(a.filing_date))
|
|
||||||
.slice(0, limit);
|
|
||||||
|
|
||||||
return Response.json({ filings });
|
|
||||||
})
|
|
||||||
.post('/filings/sync', async ({ body }) => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const payload = asRecord(body);
|
|
||||||
const ticker = typeof payload.ticker === 'string' ? payload.ticker.trim().toUpperCase() : '';
|
|
||||||
|
|
||||||
if (!ticker) {
|
|
||||||
return jsonError('ticker is required');
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const limit = typeof payload.limit === 'number' ? payload.limit : Number(payload.limit);
|
|
||||||
const task = await enqueueTask({
|
|
||||||
userId: session.user.id,
|
|
||||||
taskType: 'sync_filings',
|
|
||||||
payload: {
|
|
||||||
ticker,
|
|
||||||
limit: Number.isFinite(limit) ? limit : 20
|
|
||||||
},
|
|
||||||
priority: 90
|
|
||||||
});
|
|
||||||
|
|
||||||
return Response.json({ task });
|
|
||||||
} catch (error) {
|
|
||||||
return jsonError(asErrorMessage(error, 'Failed to queue filings sync task'));
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.post('/filings/:accessionNumber/analyze', async ({ params }) => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const accessionNumber = params.accessionNumber?.trim() ?? '';
|
|
||||||
if (accessionNumber.length < 4) {
|
|
||||||
return jsonError('Invalid accession number');
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const task = await enqueueTask({
|
|
||||||
userId: session.user.id,
|
|
||||||
taskType: 'analyze_filing',
|
|
||||||
payload: { accessionNumber },
|
|
||||||
priority: 65
|
|
||||||
});
|
|
||||||
|
|
||||||
return Response.json({ task });
|
|
||||||
} catch (error) {
|
|
||||||
return jsonError(asErrorMessage(error, 'Failed to queue filing analysis task'));
|
|
||||||
}
|
|
||||||
})
|
|
||||||
.get('/tasks', async ({ request }) => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const url = new URL(request.url);
|
|
||||||
const limitValue = Number(url.searchParams.get('limit') ?? 20);
|
|
||||||
const limit = Number.isFinite(limitValue)
|
|
||||||
? Math.min(Math.max(Math.trunc(limitValue), 1), 200)
|
|
||||||
: 20;
|
|
||||||
|
|
||||||
const rawStatuses = url.searchParams.getAll('status');
|
|
||||||
const statuses = rawStatuses.filter((status): status is TaskStatus => {
|
|
||||||
return ALLOWED_STATUSES.includes(status as TaskStatus);
|
|
||||||
});
|
|
||||||
|
|
||||||
const tasks = await listRecentTasks(
|
|
||||||
session.user.id,
|
|
||||||
limit,
|
|
||||||
statuses.length > 0 ? statuses : undefined
|
|
||||||
);
|
|
||||||
|
|
||||||
return Response.json({ tasks });
|
|
||||||
})
|
|
||||||
.get('/tasks/:taskId', async ({ params }) => {
|
|
||||||
const { session, response } = await requireAuthenticatedSession();
|
|
||||||
if (response) {
|
|
||||||
return response;
|
|
||||||
}
|
|
||||||
|
|
||||||
const task = await getTaskById(params.taskId, session.user.id);
|
|
||||||
if (!task) {
|
|
||||||
return jsonError('Task not found', 404);
|
|
||||||
}
|
|
||||||
|
|
||||||
return Response.json({ task });
|
|
||||||
});
|
|
||||||
|
|
||||||
export const GET = app.fetch;
|
export const GET = app.fetch;
|
||||||
export const POST = app.fetch;
|
export const POST = app.fetch;
|
||||||
|
|||||||
54
app/workflows/task-runner.ts
Normal file
54
app/workflows/task-runner.ts
Normal file
@@ -0,0 +1,54 @@
|
|||||||
|
import { sleep } from 'workflow';
|
||||||
|
import { start } from 'workflow/api';
|
||||||
|
import { runTaskProcessor } from '@/lib/server/task-processors';
|
||||||
|
import {
|
||||||
|
claimQueuedTask,
|
||||||
|
completeTask,
|
||||||
|
markTaskFailure
|
||||||
|
} from '@/lib/server/repos/tasks';
|
||||||
|
import type { Task } from '@/lib/types';
|
||||||
|
|
||||||
|
export async function runTaskWorkflow(taskId: string) {
|
||||||
|
'use workflow';
|
||||||
|
|
||||||
|
const task = await claimQueuedTaskStep(taskId);
|
||||||
|
if (!task) {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const result = await processTaskStep(task);
|
||||||
|
await completeTaskStep(task.id, result);
|
||||||
|
} catch (error) {
|
||||||
|
const reason = error instanceof Error
|
||||||
|
? error.message
|
||||||
|
: 'Task failed unexpectedly';
|
||||||
|
|
||||||
|
const nextState = await markTaskFailureStep(task.id, reason);
|
||||||
|
|
||||||
|
if (nextState.shouldRetry) {
|
||||||
|
await sleep('1200ms');
|
||||||
|
await start(runTaskWorkflow, [task.id]);
|
||||||
|
}
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
async function claimQueuedTaskStep(taskId: string) {
|
||||||
|
'use step';
|
||||||
|
return await claimQueuedTask(taskId);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function processTaskStep(task: Task) {
|
||||||
|
'use step';
|
||||||
|
return await runTaskProcessor(task);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function completeTaskStep(taskId: string, result: Record<string, unknown>) {
|
||||||
|
'use step';
|
||||||
|
await completeTask(taskId, result);
|
||||||
|
}
|
||||||
|
|
||||||
|
async function markTaskFailureStep(taskId: string, reason: string) {
|
||||||
|
'use step';
|
||||||
|
return await markTaskFailure(taskId, reason);
|
||||||
|
}
|
||||||
@@ -22,6 +22,10 @@ services:
|
|||||||
OPENCLAW_API_KEY: ${OPENCLAW_API_KEY:-}
|
OPENCLAW_API_KEY: ${OPENCLAW_API_KEY:-}
|
||||||
OPENCLAW_MODEL: ${OPENCLAW_MODEL:-zeroclaw}
|
OPENCLAW_MODEL: ${OPENCLAW_MODEL:-zeroclaw}
|
||||||
SEC_USER_AGENT: ${SEC_USER_AGENT:-Fiscal Clone <support@fiscal.local>}
|
SEC_USER_AGENT: ${SEC_USER_AGENT:-Fiscal Clone <support@fiscal.local>}
|
||||||
|
WORKFLOW_TARGET_WORLD: ${WORKFLOW_TARGET_WORLD:-@workflow/world-postgres}
|
||||||
|
WORKFLOW_POSTGRES_URL: ${WORKFLOW_POSTGRES_URL:-postgres://postgres:postgres@postgres:5432/fiscal_clone}
|
||||||
|
WORKFLOW_POSTGRES_JOB_PREFIX: ${WORKFLOW_POSTGRES_JOB_PREFIX:-fiscal_clone}
|
||||||
|
WORKFLOW_POSTGRES_WORKER_CONCURRENCY: ${WORKFLOW_POSTGRES_WORKER_CONCURRENCY:-10}
|
||||||
depends_on:
|
depends_on:
|
||||||
postgres:
|
postgres:
|
||||||
condition: service_healthy
|
condition: service_healthy
|
||||||
@@ -32,9 +36,6 @@ services:
|
|||||||
retries: 3
|
retries: 3
|
||||||
expose:
|
expose:
|
||||||
- "3000"
|
- "3000"
|
||||||
volumes:
|
|
||||||
- app_data:/app/data
|
|
||||||
|
|
||||||
postgres:
|
postgres:
|
||||||
image: postgres:16-alpine
|
image: postgres:16-alpine
|
||||||
restart: unless-stopped
|
restart: unless-stopped
|
||||||
@@ -51,5 +52,4 @@ services:
|
|||||||
retries: 5
|
retries: 5
|
||||||
|
|
||||||
volumes:
|
volumes:
|
||||||
app_data:
|
|
||||||
postgres_data:
|
postgres_data:
|
||||||
|
|||||||
94
drizzle/0001_boring_toad.sql
Normal file
94
drizzle/0001_boring_toad.sql
Normal file
@@ -0,0 +1,94 @@
|
|||||||
|
CREATE TABLE "filing" (
|
||||||
|
"id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "filing_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),
|
||||||
|
"ticker" text NOT NULL,
|
||||||
|
"filing_type" text NOT NULL,
|
||||||
|
"filing_date" text NOT NULL,
|
||||||
|
"accession_number" text NOT NULL,
|
||||||
|
"cik" text NOT NULL,
|
||||||
|
"company_name" text NOT NULL,
|
||||||
|
"filing_url" text,
|
||||||
|
"submission_url" text,
|
||||||
|
"primary_document" text,
|
||||||
|
"metrics" jsonb,
|
||||||
|
"analysis" jsonb,
|
||||||
|
"created_at" timestamp with time zone NOT NULL,
|
||||||
|
"updated_at" timestamp with time zone NOT NULL
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "filing_link" (
|
||||||
|
"id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "filing_link_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),
|
||||||
|
"filing_id" integer NOT NULL,
|
||||||
|
"link_type" text NOT NULL,
|
||||||
|
"url" text NOT NULL,
|
||||||
|
"source" text DEFAULT 'sec' NOT NULL,
|
||||||
|
"created_at" timestamp with time zone NOT NULL
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "holding" (
|
||||||
|
"id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "holding_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),
|
||||||
|
"user_id" text NOT NULL,
|
||||||
|
"ticker" text NOT NULL,
|
||||||
|
"shares" numeric(30, 6) NOT NULL,
|
||||||
|
"avg_cost" numeric(30, 6) NOT NULL,
|
||||||
|
"current_price" numeric(30, 6),
|
||||||
|
"market_value" numeric(30, 2) NOT NULL,
|
||||||
|
"gain_loss" numeric(30, 2) NOT NULL,
|
||||||
|
"gain_loss_pct" numeric(30, 2) NOT NULL,
|
||||||
|
"last_price_at" timestamp with time zone,
|
||||||
|
"created_at" timestamp with time zone NOT NULL,
|
||||||
|
"updated_at" timestamp with time zone NOT NULL
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "portfolio_insight" (
|
||||||
|
"id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "portfolio_insight_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),
|
||||||
|
"user_id" text NOT NULL,
|
||||||
|
"provider" text NOT NULL,
|
||||||
|
"model" text NOT NULL,
|
||||||
|
"content" text NOT NULL,
|
||||||
|
"created_at" timestamp with time zone NOT NULL
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "task_run" (
|
||||||
|
"id" text PRIMARY KEY NOT NULL,
|
||||||
|
"user_id" text NOT NULL,
|
||||||
|
"task_type" text NOT NULL,
|
||||||
|
"status" text NOT NULL,
|
||||||
|
"priority" integer NOT NULL,
|
||||||
|
"payload" jsonb NOT NULL,
|
||||||
|
"result" jsonb,
|
||||||
|
"error" text,
|
||||||
|
"attempts" integer NOT NULL,
|
||||||
|
"max_attempts" integer NOT NULL,
|
||||||
|
"workflow_run_id" text,
|
||||||
|
"created_at" timestamp with time zone NOT NULL,
|
||||||
|
"updated_at" timestamp with time zone NOT NULL,
|
||||||
|
"finished_at" timestamp with time zone
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
CREATE TABLE "watchlist_item" (
|
||||||
|
"id" integer PRIMARY KEY GENERATED ALWAYS AS IDENTITY (sequence name "watchlist_item_id_seq" INCREMENT BY 1 MINVALUE 1 MAXVALUE 2147483647 START WITH 1 CACHE 1),
|
||||||
|
"user_id" text NOT NULL,
|
||||||
|
"ticker" text NOT NULL,
|
||||||
|
"company_name" text NOT NULL,
|
||||||
|
"sector" text,
|
||||||
|
"created_at" timestamp with time zone NOT NULL
|
||||||
|
);
|
||||||
|
--> statement-breakpoint
|
||||||
|
ALTER TABLE "filing_link" ADD CONSTRAINT "filing_link_filing_id_filing_id_fk" FOREIGN KEY ("filing_id") REFERENCES "public"."filing"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "holding" ADD CONSTRAINT "holding_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "portfolio_insight" ADD CONSTRAINT "portfolio_insight_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "task_run" ADD CONSTRAINT "task_run_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
ALTER TABLE "watchlist_item" ADD CONSTRAINT "watchlist_item_user_id_user_id_fk" FOREIGN KEY ("user_id") REFERENCES "public"."user"("id") ON DELETE cascade ON UPDATE no action;--> statement-breakpoint
|
||||||
|
CREATE UNIQUE INDEX "filing_accession_uidx" ON "filing" USING btree ("accession_number");--> statement-breakpoint
|
||||||
|
CREATE INDEX "filing_ticker_date_idx" ON "filing" USING btree ("ticker","filing_date");--> statement-breakpoint
|
||||||
|
CREATE INDEX "filing_date_idx" ON "filing" USING btree ("filing_date");--> statement-breakpoint
|
||||||
|
CREATE UNIQUE INDEX "filing_link_unique_uidx" ON "filing_link" USING btree ("filing_id","url");--> statement-breakpoint
|
||||||
|
CREATE INDEX "filing_link_filing_idx" ON "filing_link" USING btree ("filing_id");--> statement-breakpoint
|
||||||
|
CREATE UNIQUE INDEX "holding_user_ticker_uidx" ON "holding" USING btree ("user_id","ticker");--> statement-breakpoint
|
||||||
|
CREATE INDEX "holding_user_idx" ON "holding" USING btree ("user_id");--> statement-breakpoint
|
||||||
|
CREATE INDEX "insight_user_created_idx" ON "portfolio_insight" USING btree ("user_id","created_at");--> statement-breakpoint
|
||||||
|
CREATE INDEX "task_user_created_idx" ON "task_run" USING btree ("user_id","created_at");--> statement-breakpoint
|
||||||
|
CREATE INDEX "task_status_idx" ON "task_run" USING btree ("status");--> statement-breakpoint
|
||||||
|
CREATE UNIQUE INDEX "task_workflow_run_uidx" ON "task_run" USING btree ("workflow_run_id");--> statement-breakpoint
|
||||||
|
CREATE UNIQUE INDEX "watchlist_user_ticker_uidx" ON "watchlist_item" USING btree ("user_id","ticker");--> statement-breakpoint
|
||||||
|
CREATE INDEX "watchlist_user_created_idx" ON "watchlist_item" USING btree ("user_id","created_at");
|
||||||
1502
drizzle/meta/0001_snapshot.json
Normal file
1502
drizzle/meta/0001_snapshot.json
Normal file
File diff suppressed because it is too large
Load Diff
@@ -8,6 +8,13 @@
|
|||||||
"when": 1771967961625,
|
"when": 1771967961625,
|
||||||
"tag": "0000_tense_centennial",
|
"tag": "0000_tense_centennial",
|
||||||
"breakpoints": true
|
"breakpoints": true
|
||||||
|
},
|
||||||
|
{
|
||||||
|
"idx": 1,
|
||||||
|
"version": "7",
|
||||||
|
"when": 1772076911227,
|
||||||
|
"tag": "0001_boring_toad",
|
||||||
|
"breakpoints": true
|
||||||
}
|
}
|
||||||
]
|
]
|
||||||
}
|
}
|
||||||
8
instrumentation.ts
Normal file
8
instrumentation.ts
Normal file
@@ -0,0 +1,8 @@
|
|||||||
|
export async function register() {
|
||||||
|
if (process.env.NEXT_RUNTIME === 'edge') {
|
||||||
|
return;
|
||||||
|
}
|
||||||
|
|
||||||
|
const { getWorld } = await import('workflow/runtime');
|
||||||
|
await getWorld().start?.();
|
||||||
|
}
|
||||||
167
lib/api.ts
167
lib/api.ts
@@ -1,3 +1,5 @@
|
|||||||
|
import { edenTreaty } from '@elysiajs/eden';
|
||||||
|
import type { App } from '@/lib/server/api/app';
|
||||||
import type {
|
import type {
|
||||||
Filing,
|
Filing,
|
||||||
Holding,
|
Holding,
|
||||||
@@ -11,6 +13,13 @@ import { resolveApiBaseURL } from './runtime-url';
|
|||||||
|
|
||||||
const API_BASE = resolveApiBaseURL(process.env.NEXT_PUBLIC_API_URL);
|
const API_BASE = resolveApiBaseURL(process.env.NEXT_PUBLIC_API_URL);
|
||||||
|
|
||||||
|
const client = edenTreaty<App>(API_BASE, {
|
||||||
|
$fetch: {
|
||||||
|
credentials: 'include',
|
||||||
|
cache: 'no-store'
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
export class ApiError extends Error {
|
export class ApiError extends Error {
|
||||||
status: number;
|
status: number;
|
||||||
|
|
||||||
@@ -21,56 +30,96 @@ export class ApiError extends Error {
|
|||||||
}
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
async function apiFetch<T>(path: string, init?: RequestInit): Promise<T> {
|
function extractErrorMessage(error: unknown, fallback: string) {
|
||||||
const headers = new Headers(init?.headers);
|
if (!error || typeof error !== 'object') {
|
||||||
if (!headers.has('Content-Type')) {
|
return fallback;
|
||||||
headers.set('Content-Type', 'application/json');
|
|
||||||
}
|
}
|
||||||
|
|
||||||
const response = await fetch(`${API_BASE}${path}`, {
|
const candidate = error as {
|
||||||
...init,
|
value?: unknown;
|
||||||
credentials: 'include',
|
message?: string;
|
||||||
headers,
|
};
|
||||||
cache: 'no-store'
|
|
||||||
});
|
|
||||||
|
|
||||||
const body = await response.json().catch(() => ({}));
|
if (typeof candidate.message === 'string' && candidate.message.trim().length > 0) {
|
||||||
|
return candidate.message;
|
||||||
if (!response.ok) {
|
|
||||||
const message = typeof body?.error === 'string' ? body.error : `Request failed (${response.status})`;
|
|
||||||
throw new ApiError(message, response.status);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
return body as T;
|
if (candidate.value && typeof candidate.value === 'object') {
|
||||||
|
const nested = candidate.value as { error?: unknown; message?: unknown };
|
||||||
|
|
||||||
|
if (typeof nested.error === 'string' && nested.error.trim().length > 0) {
|
||||||
|
return nested.error;
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof nested.message === 'string' && nested.message.trim().length > 0) {
|
||||||
|
return nested.message;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
if (typeof candidate.value === 'string' && candidate.value.trim().length > 0) {
|
||||||
|
return candidate.value;
|
||||||
|
}
|
||||||
|
|
||||||
|
return fallback;
|
||||||
|
}
|
||||||
|
|
||||||
|
type TreatyResult = {
|
||||||
|
data: unknown;
|
||||||
|
error: unknown;
|
||||||
|
status: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
async function unwrapData<T>(result: TreatyResult, fallback: string) {
|
||||||
|
if (result.error) {
|
||||||
|
throw new ApiError(
|
||||||
|
extractErrorMessage(result.error, fallback),
|
||||||
|
result.status
|
||||||
|
);
|
||||||
|
}
|
||||||
|
|
||||||
|
if (result.data === null || result.data === undefined) {
|
||||||
|
throw new ApiError(fallback, result.status);
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload = result.data instanceof Response
|
||||||
|
? await result.data.json().catch(() => null)
|
||||||
|
: result.data;
|
||||||
|
|
||||||
|
if (payload === null || payload === undefined) {
|
||||||
|
throw new ApiError(fallback, result.status);
|
||||||
|
}
|
||||||
|
|
||||||
|
return payload as T;
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function getMe() {
|
export async function getMe() {
|
||||||
return await apiFetch<{ user: User }>('/api/me');
|
const result = await client.api.me.get();
|
||||||
|
return await unwrapData<{ user: User }>(result, 'Unable to fetch session');
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function listWatchlist() {
|
export async function listWatchlist() {
|
||||||
return await apiFetch<{ items: WatchlistItem[] }>('/api/watchlist');
|
const result = await client.api.watchlist.get();
|
||||||
|
return await unwrapData<{ items: WatchlistItem[] }>(result, 'Unable to fetch watchlist');
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function upsertWatchlistItem(input: { ticker: string; companyName: string; sector?: string }) {
|
export async function upsertWatchlistItem(input: { ticker: string; companyName: string; sector?: string }) {
|
||||||
return await apiFetch<{ item: WatchlistItem }>('/api/watchlist', {
|
const result = await client.api.watchlist.post(input);
|
||||||
method: 'POST',
|
return await unwrapData<{ item: WatchlistItem }>(result, 'Unable to save watchlist item');
|
||||||
body: JSON.stringify(input)
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function deleteWatchlistItem(id: number) {
|
export async function deleteWatchlistItem(id: number) {
|
||||||
return await apiFetch<{ success: boolean }>(`/api/watchlist/${id}`, {
|
const result = await client.api.watchlist[id].delete();
|
||||||
method: 'DELETE'
|
return await unwrapData<{ success: boolean }>(result, 'Unable to delete watchlist item');
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function listHoldings() {
|
export async function listHoldings() {
|
||||||
return await apiFetch<{ holdings: Holding[] }>('/api/portfolio/holdings');
|
const result = await client.api.portfolio.holdings.get();
|
||||||
|
return await unwrapData<{ holdings: Holding[] }>(result, 'Unable to fetch holdings');
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function getPortfolioSummary() {
|
export async function getPortfolioSummary() {
|
||||||
return await apiFetch<{ summary: PortfolioSummary }>('/api/portfolio/summary');
|
const result = await client.api.portfolio.summary.get();
|
||||||
|
return await unwrapData<{ summary: PortfolioSummary }>(result, 'Unable to fetch summary');
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function upsertHolding(input: {
|
export async function upsertHolding(input: {
|
||||||
@@ -79,66 +128,62 @@ export async function upsertHolding(input: {
|
|||||||
avgCost: number;
|
avgCost: number;
|
||||||
currentPrice?: number;
|
currentPrice?: number;
|
||||||
}) {
|
}) {
|
||||||
return await apiFetch<{ holding: Holding }>('/api/portfolio/holdings', {
|
const result = await client.api.portfolio.holdings.post(input);
|
||||||
method: 'POST',
|
return await unwrapData<{ holding: Holding }>(result, 'Unable to save holding');
|
||||||
body: JSON.stringify(input)
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function deleteHolding(id: number) {
|
export async function deleteHolding(id: number) {
|
||||||
return await apiFetch<{ success: boolean }>(`/api/portfolio/holdings/${id}`, {
|
const result = await client.api.portfolio.holdings[id].delete();
|
||||||
method: 'DELETE'
|
return await unwrapData<{ success: boolean }>(result, 'Unable to delete holding');
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function queuePriceRefresh() {
|
export async function queuePriceRefresh() {
|
||||||
return await apiFetch<{ task: Task }>('/api/portfolio/refresh-prices', {
|
const result = await client.api.portfolio['refresh-prices'].post();
|
||||||
method: 'POST'
|
return await unwrapData<{ task: Task }>(result, 'Unable to queue price refresh');
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function queuePortfolioInsights() {
|
export async function queuePortfolioInsights() {
|
||||||
return await apiFetch<{ task: Task }>('/api/portfolio/insights/generate', {
|
const result = await client.api.portfolio.insights.generate.post();
|
||||||
method: 'POST'
|
return await unwrapData<{ task: Task }>(result, 'Unable to queue portfolio insights');
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function getLatestPortfolioInsight() {
|
export async function getLatestPortfolioInsight() {
|
||||||
return await apiFetch<{ insight: PortfolioInsight | null }>('/api/portfolio/insights/latest');
|
const result = await client.api.portfolio.insights.latest.get();
|
||||||
|
return await unwrapData<{ insight: PortfolioInsight | null }>(result, 'Unable to fetch latest insight');
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function listFilings(query?: { ticker?: string; limit?: number }) {
|
export async function listFilings(query?: { ticker?: string; limit?: number }) {
|
||||||
const params = new URLSearchParams();
|
const result = await client.api.filings.get({
|
||||||
|
$query: {
|
||||||
|
ticker: query?.ticker,
|
||||||
|
limit: query?.limit
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
if (query?.ticker) {
|
return await unwrapData<{ filings: Filing[] }>(result, 'Unable to fetch filings');
|
||||||
params.set('ticker', query.ticker);
|
|
||||||
}
|
|
||||||
|
|
||||||
if (query?.limit) {
|
|
||||||
params.set('limit', String(query.limit));
|
|
||||||
}
|
|
||||||
|
|
||||||
const suffix = params.size > 0 ? `?${params.toString()}` : '';
|
|
||||||
return await apiFetch<{ filings: Filing[] }>(`/api/filings${suffix}`);
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function queueFilingSync(input: { ticker: string; limit?: number }) {
|
export async function queueFilingSync(input: { ticker: string; limit?: number }) {
|
||||||
return await apiFetch<{ task: Task }>('/api/filings/sync', {
|
const result = await client.api.filings.sync.post(input);
|
||||||
method: 'POST',
|
return await unwrapData<{ task: Task }>(result, 'Unable to queue filing sync');
|
||||||
body: JSON.stringify(input)
|
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function queueFilingAnalysis(accessionNumber: string) {
|
export async function queueFilingAnalysis(accessionNumber: string) {
|
||||||
return await apiFetch<{ task: Task }>(`/api/filings/${accessionNumber}/analyze`, {
|
const result = await client.api.filings[accessionNumber].analyze.post();
|
||||||
method: 'POST'
|
return await unwrapData<{ task: Task }>(result, 'Unable to queue filing analysis');
|
||||||
});
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function getTask(taskId: string) {
|
export async function getTask(taskId: string) {
|
||||||
return await apiFetch<{ task: Task }>(`/api/tasks/${taskId}`);
|
const result = await client.api.tasks[taskId].get();
|
||||||
|
return await unwrapData<{ task: Task }>(result, 'Unable to fetch task');
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function listRecentTasks(limit = 20) {
|
export async function listRecentTasks(limit = 20) {
|
||||||
return await apiFetch<{ tasks: Task[] }>(`/api/tasks?limit=${limit}`);
|
const result = await client.api.tasks.get({
|
||||||
|
$query: {
|
||||||
|
limit
|
||||||
|
}
|
||||||
|
});
|
||||||
|
|
||||||
|
return await unwrapData<{ tasks: Task[] }>(result, 'Unable to fetch tasks');
|
||||||
}
|
}
|
||||||
|
|||||||
458
lib/server/api/app.ts
Normal file
458
lib/server/api/app.ts
Normal file
@@ -0,0 +1,458 @@
|
|||||||
|
import { Elysia, t } from 'elysia';
|
||||||
|
import type { TaskStatus } from '@/lib/types';
|
||||||
|
import { auth } from '@/lib/auth';
|
||||||
|
import { requireAuthenticatedSession } from '@/lib/server/auth-session';
|
||||||
|
import { asErrorMessage, jsonError } from '@/lib/server/http';
|
||||||
|
import { buildPortfolioSummary } from '@/lib/server/portfolio';
|
||||||
|
import { listFilingsRecords } from '@/lib/server/repos/filings';
|
||||||
|
import {
|
||||||
|
deleteHoldingByIdRecord,
|
||||||
|
listUserHoldings,
|
||||||
|
updateHoldingByIdRecord,
|
||||||
|
upsertHoldingRecord
|
||||||
|
} from '@/lib/server/repos/holdings';
|
||||||
|
import { getLatestPortfolioInsight } from '@/lib/server/repos/insights';
|
||||||
|
import {
|
||||||
|
deleteWatchlistItemRecord,
|
||||||
|
listWatchlistItems,
|
||||||
|
upsertWatchlistItemRecord
|
||||||
|
} from '@/lib/server/repos/watchlist';
|
||||||
|
import {
|
||||||
|
enqueueTask,
|
||||||
|
getTaskById,
|
||||||
|
getTaskQueueSnapshot,
|
||||||
|
listRecentTasks
|
||||||
|
} from '@/lib/server/tasks';
|
||||||
|
|
||||||
|
const ALLOWED_STATUSES: TaskStatus[] = ['queued', 'running', 'completed', 'failed'];
|
||||||
|
|
||||||
|
function asRecord(value: unknown): Record<string, unknown> {
|
||||||
|
if (!value || typeof value !== 'object' || Array.isArray(value)) {
|
||||||
|
return {};
|
||||||
|
}
|
||||||
|
|
||||||
|
return value as Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
function asPositiveNumber(value: unknown) {
|
||||||
|
const parsed = typeof value === 'number' ? value : Number(value);
|
||||||
|
return Number.isFinite(parsed) && parsed > 0 ? parsed : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const authHandler = ({ request }: { request: Request }) => auth.handler(request);
|
||||||
|
|
||||||
|
export const app = new Elysia({ prefix: '/api' })
|
||||||
|
.all('/auth', authHandler)
|
||||||
|
.all('/auth/*', authHandler)
|
||||||
|
.get('/health', async () => {
|
||||||
|
const queue = await getTaskQueueSnapshot();
|
||||||
|
|
||||||
|
return Response.json({
|
||||||
|
status: 'ok',
|
||||||
|
version: '4.0.0',
|
||||||
|
timestamp: new Date().toISOString(),
|
||||||
|
queue
|
||||||
|
});
|
||||||
|
})
|
||||||
|
.get('/me', async () => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response.json({
|
||||||
|
user: {
|
||||||
|
id: session.user.id,
|
||||||
|
email: session.user.email,
|
||||||
|
name: session.user.name,
|
||||||
|
image: session.user.image
|
||||||
|
}
|
||||||
|
});
|
||||||
|
})
|
||||||
|
.get('/watchlist', async () => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const items = await listWatchlistItems(session.user.id);
|
||||||
|
return Response.json({ items });
|
||||||
|
})
|
||||||
|
.post('/watchlist', async ({ body }) => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload = asRecord(body);
|
||||||
|
const ticker = typeof payload.ticker === 'string' ? payload.ticker.trim().toUpperCase() : '';
|
||||||
|
const companyName = typeof payload.companyName === 'string' ? payload.companyName.trim() : '';
|
||||||
|
const sector = typeof payload.sector === 'string' ? payload.sector.trim() : '';
|
||||||
|
|
||||||
|
if (!ticker) {
|
||||||
|
return jsonError('ticker is required');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (!companyName) {
|
||||||
|
return jsonError('companyName is required');
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const item = await upsertWatchlistItemRecord({
|
||||||
|
userId: session.user.id,
|
||||||
|
ticker,
|
||||||
|
companyName,
|
||||||
|
sector
|
||||||
|
});
|
||||||
|
|
||||||
|
return Response.json({ item });
|
||||||
|
} catch (error) {
|
||||||
|
return jsonError(asErrorMessage(error, 'Failed to create watchlist item'));
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
body: t.Object({
|
||||||
|
ticker: t.String({ minLength: 1 }),
|
||||||
|
companyName: t.String({ minLength: 1 }),
|
||||||
|
sector: t.Optional(t.String())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.delete('/watchlist/:id', async ({ params }) => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const numericId = Number(params.id);
|
||||||
|
if (!Number.isInteger(numericId) || numericId <= 0) {
|
||||||
|
return jsonError('Invalid watchlist id', 400);
|
||||||
|
}
|
||||||
|
|
||||||
|
const removed = await deleteWatchlistItemRecord(session.user.id, numericId);
|
||||||
|
|
||||||
|
if (!removed) {
|
||||||
|
return jsonError('Watchlist item not found', 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response.json({ success: true });
|
||||||
|
}, {
|
||||||
|
params: t.Object({
|
||||||
|
id: t.String({ minLength: 1 })
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.get('/portfolio/holdings', async () => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const holdings = await listUserHoldings(session.user.id);
|
||||||
|
|
||||||
|
return Response.json({ holdings });
|
||||||
|
})
|
||||||
|
.post('/portfolio/holdings', async ({ body }) => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload = asRecord(body);
|
||||||
|
const ticker = typeof payload.ticker === 'string' ? payload.ticker.trim().toUpperCase() : '';
|
||||||
|
const shares = asPositiveNumber(payload.shares);
|
||||||
|
const avgCost = asPositiveNumber(payload.avgCost);
|
||||||
|
|
||||||
|
if (!ticker) {
|
||||||
|
return jsonError('ticker is required');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (shares === null) {
|
||||||
|
return jsonError('shares must be a positive number');
|
||||||
|
}
|
||||||
|
|
||||||
|
if (avgCost === null) {
|
||||||
|
return jsonError('avgCost must be a positive number');
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const currentPrice = asPositiveNumber(payload.currentPrice) ?? avgCost;
|
||||||
|
|
||||||
|
const holding = await upsertHoldingRecord({
|
||||||
|
userId: session.user.id,
|
||||||
|
ticker,
|
||||||
|
shares,
|
||||||
|
avgCost,
|
||||||
|
currentPrice
|
||||||
|
});
|
||||||
|
|
||||||
|
return Response.json({ holding });
|
||||||
|
} catch (error) {
|
||||||
|
return jsonError(asErrorMessage(error, 'Failed to save holding'));
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
body: t.Object({
|
||||||
|
ticker: t.String({ minLength: 1 }),
|
||||||
|
shares: t.Number({ exclusiveMinimum: 0 }),
|
||||||
|
avgCost: t.Number({ exclusiveMinimum: 0 }),
|
||||||
|
currentPrice: t.Optional(t.Number({ exclusiveMinimum: 0 }))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.patch('/portfolio/holdings/:id', async ({ params, body }) => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const numericId = Number(params.id);
|
||||||
|
if (!Number.isInteger(numericId) || numericId <= 0) {
|
||||||
|
return jsonError('Invalid holding id');
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload = asRecord(body);
|
||||||
|
|
||||||
|
const updated = await updateHoldingByIdRecord({
|
||||||
|
userId: session.user.id,
|
||||||
|
id: numericId,
|
||||||
|
shares: asPositiveNumber(payload.shares) ?? undefined,
|
||||||
|
avgCost: asPositiveNumber(payload.avgCost) ?? undefined,
|
||||||
|
currentPrice: asPositiveNumber(payload.currentPrice) ?? undefined
|
||||||
|
});
|
||||||
|
|
||||||
|
if (!updated) {
|
||||||
|
return jsonError('Holding not found', 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response.json({ holding: updated });
|
||||||
|
}, {
|
||||||
|
params: t.Object({
|
||||||
|
id: t.String({ minLength: 1 })
|
||||||
|
}),
|
||||||
|
body: t.Object({
|
||||||
|
shares: t.Optional(t.Number({ exclusiveMinimum: 0 })),
|
||||||
|
avgCost: t.Optional(t.Number({ exclusiveMinimum: 0 })),
|
||||||
|
currentPrice: t.Optional(t.Number({ exclusiveMinimum: 0 }))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.delete('/portfolio/holdings/:id', async ({ params }) => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const numericId = Number(params.id);
|
||||||
|
if (!Number.isInteger(numericId) || numericId <= 0) {
|
||||||
|
return jsonError('Invalid holding id');
|
||||||
|
}
|
||||||
|
|
||||||
|
const removed = await deleteHoldingByIdRecord(session.user.id, numericId);
|
||||||
|
|
||||||
|
if (!removed) {
|
||||||
|
return jsonError('Holding not found', 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response.json({ success: true });
|
||||||
|
}, {
|
||||||
|
params: t.Object({
|
||||||
|
id: t.String({ minLength: 1 })
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.get('/portfolio/summary', async () => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const holdings = await listUserHoldings(session.user.id);
|
||||||
|
const summary = buildPortfolioSummary(holdings);
|
||||||
|
|
||||||
|
return Response.json({ summary });
|
||||||
|
})
|
||||||
|
.post('/portfolio/refresh-prices', async () => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const task = await enqueueTask({
|
||||||
|
userId: session.user.id,
|
||||||
|
taskType: 'refresh_prices',
|
||||||
|
payload: {},
|
||||||
|
priority: 80
|
||||||
|
});
|
||||||
|
|
||||||
|
return Response.json({ task });
|
||||||
|
} catch (error) {
|
||||||
|
return jsonError(asErrorMessage(error, 'Failed to queue refresh task'));
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.post('/portfolio/insights/generate', async () => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const task = await enqueueTask({
|
||||||
|
userId: session.user.id,
|
||||||
|
taskType: 'portfolio_insights',
|
||||||
|
payload: {},
|
||||||
|
priority: 70
|
||||||
|
});
|
||||||
|
|
||||||
|
return Response.json({ task });
|
||||||
|
} catch (error) {
|
||||||
|
return jsonError(asErrorMessage(error, 'Failed to queue insights task'));
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.get('/portfolio/insights/latest', async () => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const insight = await getLatestPortfolioInsight(session.user.id);
|
||||||
|
|
||||||
|
return Response.json({ insight });
|
||||||
|
})
|
||||||
|
.get('/filings', async ({ query }) => {
|
||||||
|
const { response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const tickerFilter = typeof query.ticker === 'string'
|
||||||
|
? query.ticker.trim().toUpperCase()
|
||||||
|
: undefined;
|
||||||
|
|
||||||
|
const limit = typeof query.limit === 'number'
|
||||||
|
? query.limit
|
||||||
|
: Number(query.limit);
|
||||||
|
|
||||||
|
const filings = await listFilingsRecords({
|
||||||
|
ticker: tickerFilter,
|
||||||
|
limit: Number.isFinite(limit) ? limit : 50
|
||||||
|
});
|
||||||
|
|
||||||
|
return Response.json({ filings });
|
||||||
|
}, {
|
||||||
|
query: t.Object({
|
||||||
|
ticker: t.Optional(t.String()),
|
||||||
|
limit: t.Optional(t.Numeric())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.post('/filings/sync', async ({ body }) => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const payload = asRecord(body);
|
||||||
|
const ticker = typeof payload.ticker === 'string' ? payload.ticker.trim().toUpperCase() : '';
|
||||||
|
|
||||||
|
if (!ticker) {
|
||||||
|
return jsonError('ticker is required');
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const limit = typeof payload.limit === 'number' ? payload.limit : Number(payload.limit);
|
||||||
|
const task = await enqueueTask({
|
||||||
|
userId: session.user.id,
|
||||||
|
taskType: 'sync_filings',
|
||||||
|
payload: {
|
||||||
|
ticker,
|
||||||
|
limit: Number.isFinite(limit) ? limit : 20
|
||||||
|
},
|
||||||
|
priority: 90
|
||||||
|
});
|
||||||
|
|
||||||
|
return Response.json({ task });
|
||||||
|
} catch (error) {
|
||||||
|
return jsonError(asErrorMessage(error, 'Failed to queue filings sync task'));
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
body: t.Object({
|
||||||
|
ticker: t.String({ minLength: 1 }),
|
||||||
|
limit: t.Optional(t.Numeric())
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.post('/filings/:accessionNumber/analyze', async ({ params }) => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const accessionNumber = params.accessionNumber?.trim() ?? '';
|
||||||
|
if (accessionNumber.length < 4) {
|
||||||
|
return jsonError('Invalid accession number');
|
||||||
|
}
|
||||||
|
|
||||||
|
try {
|
||||||
|
const task = await enqueueTask({
|
||||||
|
userId: session.user.id,
|
||||||
|
taskType: 'analyze_filing',
|
||||||
|
payload: { accessionNumber },
|
||||||
|
priority: 65
|
||||||
|
});
|
||||||
|
|
||||||
|
return Response.json({ task });
|
||||||
|
} catch (error) {
|
||||||
|
return jsonError(asErrorMessage(error, 'Failed to queue filing analysis task'));
|
||||||
|
}
|
||||||
|
}, {
|
||||||
|
params: t.Object({
|
||||||
|
accessionNumber: t.String({ minLength: 4 })
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.get('/tasks', async ({ query }) => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const limit = typeof query.limit === 'number'
|
||||||
|
? query.limit
|
||||||
|
: Number(query.limit ?? 20);
|
||||||
|
|
||||||
|
const statusInput = query.status;
|
||||||
|
|
||||||
|
const rawStatuses = Array.isArray(statusInput)
|
||||||
|
? statusInput
|
||||||
|
: statusInput
|
||||||
|
? [statusInput]
|
||||||
|
: [];
|
||||||
|
|
||||||
|
const statuses = rawStatuses.filter((status): status is TaskStatus => {
|
||||||
|
return ALLOWED_STATUSES.includes(status as TaskStatus);
|
||||||
|
});
|
||||||
|
|
||||||
|
const tasks = await listRecentTasks(
|
||||||
|
session.user.id,
|
||||||
|
Number.isFinite(limit) ? limit : 20,
|
||||||
|
statuses.length > 0 ? statuses : undefined
|
||||||
|
);
|
||||||
|
|
||||||
|
return Response.json({ tasks });
|
||||||
|
}, {
|
||||||
|
query: t.Object({
|
||||||
|
limit: t.Optional(t.Numeric()),
|
||||||
|
status: t.Optional(t.Union([t.String(), t.Array(t.String())]))
|
||||||
|
})
|
||||||
|
})
|
||||||
|
.get('/tasks/:taskId', async ({ params }) => {
|
||||||
|
const { session, response } = await requireAuthenticatedSession();
|
||||||
|
if (response) {
|
||||||
|
return response;
|
||||||
|
}
|
||||||
|
|
||||||
|
const task = await getTaskById(params.taskId, session.user.id);
|
||||||
|
if (!task) {
|
||||||
|
return jsonError('Task not found', 404);
|
||||||
|
}
|
||||||
|
|
||||||
|
return Response.json({ task });
|
||||||
|
}, {
|
||||||
|
params: t.Object({
|
||||||
|
taskId: t.String({ minLength: 1 })
|
||||||
|
})
|
||||||
|
});
|
||||||
|
|
||||||
|
export type App = typeof app;
|
||||||
@@ -1,14 +1,14 @@
|
|||||||
import { drizzle } from 'drizzle-orm/node-postgres';
|
import { drizzle } from 'drizzle-orm/node-postgres';
|
||||||
import { Pool } from 'pg';
|
import { Pool } from 'pg';
|
||||||
import { authSchema } from './schema';
|
import { schema } from './schema';
|
||||||
|
|
||||||
type AuthDrizzleDb = ReturnType<typeof createDb>;
|
type AppDrizzleDb = ReturnType<typeof createDb>;
|
||||||
|
|
||||||
declare global {
|
declare global {
|
||||||
// eslint-disable-next-line no-var
|
// eslint-disable-next-line no-var
|
||||||
var __fiscalAuthPgPool: Pool | undefined;
|
var __fiscalPgPool: Pool | undefined;
|
||||||
// eslint-disable-next-line no-var
|
// eslint-disable-next-line no-var
|
||||||
var __fiscalAuthDrizzleDb: AuthDrizzleDb | undefined;
|
var __fiscalDrizzleDb: AppDrizzleDb | undefined;
|
||||||
}
|
}
|
||||||
|
|
||||||
function getConnectionString() {
|
function getConnectionString() {
|
||||||
@@ -21,21 +21,21 @@ function getConnectionString() {
|
|||||||
}
|
}
|
||||||
|
|
||||||
export function getPool() {
|
export function getPool() {
|
||||||
if (!globalThis.__fiscalAuthPgPool) {
|
if (!globalThis.__fiscalPgPool) {
|
||||||
globalThis.__fiscalAuthPgPool = new Pool({
|
globalThis.__fiscalPgPool = new Pool({
|
||||||
connectionString: getConnectionString()
|
connectionString: getConnectionString()
|
||||||
});
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
return globalThis.__fiscalAuthPgPool;
|
return globalThis.__fiscalPgPool;
|
||||||
}
|
}
|
||||||
|
|
||||||
function createDb() {
|
function createDb() {
|
||||||
return drizzle(getPool(), { schema: authSchema });
|
return drizzle(getPool(), { schema });
|
||||||
}
|
}
|
||||||
|
|
||||||
export const db = globalThis.__fiscalAuthDrizzleDb ?? createDb();
|
export const db = globalThis.__fiscalDrizzleDb ?? createDb();
|
||||||
|
|
||||||
if (!globalThis.__fiscalAuthDrizzleDb) {
|
if (!globalThis.__fiscalDrizzleDb) {
|
||||||
globalThis.__fiscalAuthDrizzleDb = db;
|
globalThis.__fiscalDrizzleDb = db;
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -1,22 +1,52 @@
|
|||||||
import { boolean, index, pgTable, text, timestamp, uniqueIndex } from 'drizzle-orm/pg-core';
|
import {
|
||||||
|
boolean,
|
||||||
|
index,
|
||||||
|
integer,
|
||||||
|
jsonb,
|
||||||
|
numeric,
|
||||||
|
pgTable,
|
||||||
|
text,
|
||||||
|
timestamp,
|
||||||
|
uniqueIndex
|
||||||
|
} from 'drizzle-orm/pg-core';
|
||||||
|
|
||||||
const dateColumn = {
|
type FilingMetrics = {
|
||||||
|
revenue: number | null;
|
||||||
|
netIncome: number | null;
|
||||||
|
totalAssets: number | null;
|
||||||
|
cash: number | null;
|
||||||
|
debt: number | null;
|
||||||
|
};
|
||||||
|
|
||||||
|
type FilingAnalysis = {
|
||||||
|
provider?: string;
|
||||||
|
model?: string;
|
||||||
|
text?: string;
|
||||||
|
legacyInsights?: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
const authDateColumn = {
|
||||||
withTimezone: true,
|
withTimezone: true,
|
||||||
mode: 'date'
|
mode: 'date'
|
||||||
} as const;
|
} as const;
|
||||||
|
|
||||||
|
const appDateColumn = {
|
||||||
|
withTimezone: true,
|
||||||
|
mode: 'string'
|
||||||
|
} as const;
|
||||||
|
|
||||||
export const user = pgTable('user', {
|
export const user = pgTable('user', {
|
||||||
id: text('id').primaryKey().notNull(),
|
id: text('id').primaryKey().notNull(),
|
||||||
name: text('name').notNull(),
|
name: text('name').notNull(),
|
||||||
email: text('email').notNull(),
|
email: text('email').notNull(),
|
||||||
emailVerified: boolean('emailVerified').notNull().default(false),
|
emailVerified: boolean('emailVerified').notNull().default(false),
|
||||||
image: text('image'),
|
image: text('image'),
|
||||||
createdAt: timestamp('createdAt', dateColumn).notNull(),
|
createdAt: timestamp('createdAt', authDateColumn).notNull(),
|
||||||
updatedAt: timestamp('updatedAt', dateColumn).notNull(),
|
updatedAt: timestamp('updatedAt', authDateColumn).notNull(),
|
||||||
role: text('role'),
|
role: text('role'),
|
||||||
banned: boolean('banned').default(false),
|
banned: boolean('banned').default(false),
|
||||||
banReason: text('banReason'),
|
banReason: text('banReason'),
|
||||||
banExpires: timestamp('banExpires', dateColumn)
|
banExpires: timestamp('banExpires', authDateColumn)
|
||||||
}, (table) => ({
|
}, (table) => ({
|
||||||
userEmailUnique: uniqueIndex('user_email_uidx').on(table.email)
|
userEmailUnique: uniqueIndex('user_email_uidx').on(table.email)
|
||||||
}));
|
}));
|
||||||
@@ -26,7 +56,7 @@ export const organization = pgTable('organization', {
|
|||||||
name: text('name').notNull(),
|
name: text('name').notNull(),
|
||||||
slug: text('slug').notNull(),
|
slug: text('slug').notNull(),
|
||||||
logo: text('logo'),
|
logo: text('logo'),
|
||||||
createdAt: timestamp('createdAt', dateColumn).notNull(),
|
createdAt: timestamp('createdAt', authDateColumn).notNull(),
|
||||||
metadata: text('metadata')
|
metadata: text('metadata')
|
||||||
}, (table) => ({
|
}, (table) => ({
|
||||||
organizationSlugUnique: uniqueIndex('organization_slug_uidx').on(table.slug)
|
organizationSlugUnique: uniqueIndex('organization_slug_uidx').on(table.slug)
|
||||||
@@ -34,10 +64,10 @@ export const organization = pgTable('organization', {
|
|||||||
|
|
||||||
export const session = pgTable('session', {
|
export const session = pgTable('session', {
|
||||||
id: text('id').primaryKey().notNull(),
|
id: text('id').primaryKey().notNull(),
|
||||||
expiresAt: timestamp('expiresAt', dateColumn).notNull(),
|
expiresAt: timestamp('expiresAt', authDateColumn).notNull(),
|
||||||
token: text('token').notNull(),
|
token: text('token').notNull(),
|
||||||
createdAt: timestamp('createdAt', dateColumn).notNull(),
|
createdAt: timestamp('createdAt', authDateColumn).notNull(),
|
||||||
updatedAt: timestamp('updatedAt', dateColumn).notNull(),
|
updatedAt: timestamp('updatedAt', authDateColumn).notNull(),
|
||||||
ipAddress: text('ipAddress'),
|
ipAddress: text('ipAddress'),
|
||||||
userAgent: text('userAgent'),
|
userAgent: text('userAgent'),
|
||||||
userId: text('userId').notNull().references(() => user.id, { onDelete: 'cascade' }),
|
userId: text('userId').notNull().references(() => user.id, { onDelete: 'cascade' }),
|
||||||
@@ -56,12 +86,12 @@ export const account = pgTable('account', {
|
|||||||
accessToken: text('accessToken'),
|
accessToken: text('accessToken'),
|
||||||
refreshToken: text('refreshToken'),
|
refreshToken: text('refreshToken'),
|
||||||
idToken: text('idToken'),
|
idToken: text('idToken'),
|
||||||
accessTokenExpiresAt: timestamp('accessTokenExpiresAt', dateColumn),
|
accessTokenExpiresAt: timestamp('accessTokenExpiresAt', authDateColumn),
|
||||||
refreshTokenExpiresAt: timestamp('refreshTokenExpiresAt', dateColumn),
|
refreshTokenExpiresAt: timestamp('refreshTokenExpiresAt', authDateColumn),
|
||||||
scope: text('scope'),
|
scope: text('scope'),
|
||||||
password: text('password'),
|
password: text('password'),
|
||||||
createdAt: timestamp('createdAt', dateColumn).notNull(),
|
createdAt: timestamp('createdAt', authDateColumn).notNull(),
|
||||||
updatedAt: timestamp('updatedAt', dateColumn).notNull()
|
updatedAt: timestamp('updatedAt', authDateColumn).notNull()
|
||||||
}, (table) => ({
|
}, (table) => ({
|
||||||
accountUserIdIndex: index('account_userId_idx').on(table.userId)
|
accountUserIdIndex: index('account_userId_idx').on(table.userId)
|
||||||
}));
|
}));
|
||||||
@@ -70,9 +100,9 @@ export const verification = pgTable('verification', {
|
|||||||
id: text('id').primaryKey().notNull(),
|
id: text('id').primaryKey().notNull(),
|
||||||
identifier: text('identifier').notNull(),
|
identifier: text('identifier').notNull(),
|
||||||
value: text('value').notNull(),
|
value: text('value').notNull(),
|
||||||
expiresAt: timestamp('expiresAt', dateColumn).notNull(),
|
expiresAt: timestamp('expiresAt', authDateColumn).notNull(),
|
||||||
createdAt: timestamp('createdAt', dateColumn).notNull(),
|
createdAt: timestamp('createdAt', authDateColumn).notNull(),
|
||||||
updatedAt: timestamp('updatedAt', dateColumn).notNull()
|
updatedAt: timestamp('updatedAt', authDateColumn).notNull()
|
||||||
}, (table) => ({
|
}, (table) => ({
|
||||||
verificationIdentifierIndex: index('verification_identifier_idx').on(table.identifier)
|
verificationIdentifierIndex: index('verification_identifier_idx').on(table.identifier)
|
||||||
}));
|
}));
|
||||||
@@ -82,7 +112,7 @@ export const member = pgTable('member', {
|
|||||||
organizationId: text('organizationId').notNull().references(() => organization.id, { onDelete: 'cascade' }),
|
organizationId: text('organizationId').notNull().references(() => organization.id, { onDelete: 'cascade' }),
|
||||||
userId: text('userId').notNull().references(() => user.id, { onDelete: 'cascade' }),
|
userId: text('userId').notNull().references(() => user.id, { onDelete: 'cascade' }),
|
||||||
role: text('role').notNull().default('member'),
|
role: text('role').notNull().default('member'),
|
||||||
createdAt: timestamp('createdAt', dateColumn).notNull()
|
createdAt: timestamp('createdAt', authDateColumn).notNull()
|
||||||
}, (table) => ({
|
}, (table) => ({
|
||||||
memberOrganizationIdIndex: index('member_organizationId_idx').on(table.organizationId),
|
memberOrganizationIdIndex: index('member_organizationId_idx').on(table.organizationId),
|
||||||
memberUserIdIndex: index('member_userId_idx').on(table.userId)
|
memberUserIdIndex: index('member_userId_idx').on(table.userId)
|
||||||
@@ -94,14 +124,109 @@ export const invitation = pgTable('invitation', {
|
|||||||
email: text('email').notNull(),
|
email: text('email').notNull(),
|
||||||
role: text('role'),
|
role: text('role'),
|
||||||
status: text('status').notNull().default('pending'),
|
status: text('status').notNull().default('pending'),
|
||||||
expiresAt: timestamp('expiresAt', dateColumn).notNull(),
|
expiresAt: timestamp('expiresAt', authDateColumn).notNull(),
|
||||||
createdAt: timestamp('createdAt', dateColumn).notNull(),
|
createdAt: timestamp('createdAt', authDateColumn).notNull(),
|
||||||
inviterId: text('inviterId').notNull().references(() => user.id, { onDelete: 'cascade' })
|
inviterId: text('inviterId').notNull().references(() => user.id, { onDelete: 'cascade' })
|
||||||
}, (table) => ({
|
}, (table) => ({
|
||||||
invitationOrganizationIdIndex: index('invitation_organizationId_idx').on(table.organizationId),
|
invitationOrganizationIdIndex: index('invitation_organizationId_idx').on(table.organizationId),
|
||||||
invitationEmailIndex: index('invitation_email_idx').on(table.email)
|
invitationEmailIndex: index('invitation_email_idx').on(table.email)
|
||||||
}));
|
}));
|
||||||
|
|
||||||
|
export const watchlistItem = pgTable('watchlist_item', {
|
||||||
|
id: integer('id').generatedAlwaysAsIdentity().primaryKey(),
|
||||||
|
user_id: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }),
|
||||||
|
ticker: text('ticker').notNull(),
|
||||||
|
company_name: text('company_name').notNull(),
|
||||||
|
sector: text('sector'),
|
||||||
|
created_at: timestamp('created_at', appDateColumn).notNull()
|
||||||
|
}, (table) => ({
|
||||||
|
watchlistUserTickerUnique: uniqueIndex('watchlist_user_ticker_uidx').on(table.user_id, table.ticker),
|
||||||
|
watchlistUserCreatedIndex: index('watchlist_user_created_idx').on(table.user_id, table.created_at)
|
||||||
|
}));
|
||||||
|
|
||||||
|
export const holding = pgTable('holding', {
|
||||||
|
id: integer('id').generatedAlwaysAsIdentity().primaryKey(),
|
||||||
|
user_id: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }),
|
||||||
|
ticker: text('ticker').notNull(),
|
||||||
|
shares: numeric('shares', { precision: 30, scale: 6 }).notNull(),
|
||||||
|
avg_cost: numeric('avg_cost', { precision: 30, scale: 6 }).notNull(),
|
||||||
|
current_price: numeric('current_price', { precision: 30, scale: 6 }),
|
||||||
|
market_value: numeric('market_value', { precision: 30, scale: 2 }).notNull(),
|
||||||
|
gain_loss: numeric('gain_loss', { precision: 30, scale: 2 }).notNull(),
|
||||||
|
gain_loss_pct: numeric('gain_loss_pct', { precision: 30, scale: 2 }).notNull(),
|
||||||
|
last_price_at: timestamp('last_price_at', appDateColumn),
|
||||||
|
created_at: timestamp('created_at', appDateColumn).notNull(),
|
||||||
|
updated_at: timestamp('updated_at', appDateColumn).notNull()
|
||||||
|
}, (table) => ({
|
||||||
|
holdingUserTickerUnique: uniqueIndex('holding_user_ticker_uidx').on(table.user_id, table.ticker),
|
||||||
|
holdingUserIndex: index('holding_user_idx').on(table.user_id)
|
||||||
|
}));
|
||||||
|
|
||||||
|
export const filing = pgTable('filing', {
|
||||||
|
id: integer('id').generatedAlwaysAsIdentity().primaryKey(),
|
||||||
|
ticker: text('ticker').notNull(),
|
||||||
|
filing_type: text('filing_type').$type<'10-K' | '10-Q' | '8-K'>().notNull(),
|
||||||
|
filing_date: text('filing_date').notNull(),
|
||||||
|
accession_number: text('accession_number').notNull(),
|
||||||
|
cik: text('cik').notNull(),
|
||||||
|
company_name: text('company_name').notNull(),
|
||||||
|
filing_url: text('filing_url'),
|
||||||
|
submission_url: text('submission_url'),
|
||||||
|
primary_document: text('primary_document'),
|
||||||
|
metrics: jsonb('metrics').$type<FilingMetrics | null>(),
|
||||||
|
analysis: jsonb('analysis').$type<FilingAnalysis | null>(),
|
||||||
|
created_at: timestamp('created_at', appDateColumn).notNull(),
|
||||||
|
updated_at: timestamp('updated_at', appDateColumn).notNull()
|
||||||
|
}, (table) => ({
|
||||||
|
filingAccessionUnique: uniqueIndex('filing_accession_uidx').on(table.accession_number),
|
||||||
|
filingTickerDateIndex: index('filing_ticker_date_idx').on(table.ticker, table.filing_date),
|
||||||
|
filingDateIndex: index('filing_date_idx').on(table.filing_date)
|
||||||
|
}));
|
||||||
|
|
||||||
|
export const filingLink = pgTable('filing_link', {
|
||||||
|
id: integer('id').generatedAlwaysAsIdentity().primaryKey(),
|
||||||
|
filing_id: integer('filing_id').notNull().references(() => filing.id, { onDelete: 'cascade' }),
|
||||||
|
link_type: text('link_type').notNull(),
|
||||||
|
url: text('url').notNull(),
|
||||||
|
source: text('source').notNull().default('sec'),
|
||||||
|
created_at: timestamp('created_at', appDateColumn).notNull()
|
||||||
|
}, (table) => ({
|
||||||
|
filingLinkUnique: uniqueIndex('filing_link_unique_uidx').on(table.filing_id, table.url),
|
||||||
|
filingLinkFilingIndex: index('filing_link_filing_idx').on(table.filing_id)
|
||||||
|
}));
|
||||||
|
|
||||||
|
export const taskRun = pgTable('task_run', {
|
||||||
|
id: text('id').primaryKey().notNull(),
|
||||||
|
user_id: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }),
|
||||||
|
task_type: text('task_type').$type<'sync_filings' | 'refresh_prices' | 'analyze_filing' | 'portfolio_insights'>().notNull(),
|
||||||
|
status: text('status').$type<'queued' | 'running' | 'completed' | 'failed'>().notNull(),
|
||||||
|
priority: integer('priority').notNull(),
|
||||||
|
payload: jsonb('payload').$type<Record<string, unknown>>().notNull(),
|
||||||
|
result: jsonb('result').$type<Record<string, unknown> | null>(),
|
||||||
|
error: text('error'),
|
||||||
|
attempts: integer('attempts').notNull(),
|
||||||
|
max_attempts: integer('max_attempts').notNull(),
|
||||||
|
workflow_run_id: text('workflow_run_id'),
|
||||||
|
created_at: timestamp('created_at', appDateColumn).notNull(),
|
||||||
|
updated_at: timestamp('updated_at', appDateColumn).notNull(),
|
||||||
|
finished_at: timestamp('finished_at', appDateColumn)
|
||||||
|
}, (table) => ({
|
||||||
|
taskUserCreatedIndex: index('task_user_created_idx').on(table.user_id, table.created_at),
|
||||||
|
taskStatusIndex: index('task_status_idx').on(table.status),
|
||||||
|
taskWorkflowRunUnique: uniqueIndex('task_workflow_run_uidx').on(table.workflow_run_id)
|
||||||
|
}));
|
||||||
|
|
||||||
|
export const portfolioInsight = pgTable('portfolio_insight', {
|
||||||
|
id: integer('id').generatedAlwaysAsIdentity().primaryKey(),
|
||||||
|
user_id: text('user_id').notNull().references(() => user.id, { onDelete: 'cascade' }),
|
||||||
|
provider: text('provider').notNull(),
|
||||||
|
model: text('model').notNull(),
|
||||||
|
content: text('content').notNull(),
|
||||||
|
created_at: timestamp('created_at', appDateColumn).notNull()
|
||||||
|
}, (table) => ({
|
||||||
|
insightUserCreatedIndex: index('insight_user_created_idx').on(table.user_id, table.created_at)
|
||||||
|
}));
|
||||||
|
|
||||||
export const authSchema = {
|
export const authSchema = {
|
||||||
user,
|
user,
|
||||||
session,
|
session,
|
||||||
@@ -111,3 +236,17 @@ export const authSchema = {
|
|||||||
member,
|
member,
|
||||||
invitation
|
invitation
|
||||||
};
|
};
|
||||||
|
|
||||||
|
export const appSchema = {
|
||||||
|
watchlistItem,
|
||||||
|
holding,
|
||||||
|
filing,
|
||||||
|
filingLink,
|
||||||
|
taskRun,
|
||||||
|
portfolioInsight
|
||||||
|
};
|
||||||
|
|
||||||
|
export const schema = {
|
||||||
|
...authSchema,
|
||||||
|
...appSchema
|
||||||
|
};
|
||||||
|
|||||||
172
lib/server/repos/filings.ts
Normal file
172
lib/server/repos/filings.ts
Normal file
@@ -0,0 +1,172 @@
|
|||||||
|
import { desc, eq } from 'drizzle-orm';
|
||||||
|
import type { Filing } from '@/lib/types';
|
||||||
|
import { db } from '@/lib/server/db';
|
||||||
|
import { filing, filingLink } from '@/lib/server/db/schema';
|
||||||
|
|
||||||
|
type FilingRow = typeof filing.$inferSelect;
|
||||||
|
|
||||||
|
type FilingLinkInput = {
|
||||||
|
link_type: string;
|
||||||
|
url: string;
|
||||||
|
};
|
||||||
|
|
||||||
|
type UpsertFilingInput = {
|
||||||
|
ticker: string;
|
||||||
|
filing_type: Filing['filing_type'];
|
||||||
|
filing_date: string;
|
||||||
|
accession_number: string;
|
||||||
|
cik: string;
|
||||||
|
company_name: string;
|
||||||
|
filing_url: string | null;
|
||||||
|
submission_url: string | null;
|
||||||
|
primary_document: string | null;
|
||||||
|
metrics: Filing['metrics'];
|
||||||
|
links: FilingLinkInput[];
|
||||||
|
};
|
||||||
|
|
||||||
|
function toFiling(row: FilingRow): Filing {
|
||||||
|
return {
|
||||||
|
id: row.id,
|
||||||
|
ticker: row.ticker,
|
||||||
|
filing_type: row.filing_type,
|
||||||
|
filing_date: row.filing_date,
|
||||||
|
accession_number: row.accession_number,
|
||||||
|
cik: row.cik,
|
||||||
|
company_name: row.company_name,
|
||||||
|
filing_url: row.filing_url,
|
||||||
|
submission_url: row.submission_url,
|
||||||
|
primary_document: row.primary_document,
|
||||||
|
metrics: row.metrics ?? null,
|
||||||
|
analysis: row.analysis ?? null,
|
||||||
|
created_at: row.created_at,
|
||||||
|
updated_at: row.updated_at
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function dedupeLinks(links: FilingLinkInput[]) {
|
||||||
|
const unique = new Map<string, FilingLinkInput>();
|
||||||
|
|
||||||
|
for (const link of links) {
|
||||||
|
const url = link.url.trim();
|
||||||
|
if (!url) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
unique.set(`${link.link_type}::${url}`, { ...link, url });
|
||||||
|
}
|
||||||
|
|
||||||
|
return [...unique.values()];
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function listFilingsRecords(query?: { ticker?: string; limit?: number }) {
|
||||||
|
const safeLimit = Math.min(Math.max(Math.trunc(query?.limit ?? 50), 1), 250);
|
||||||
|
|
||||||
|
const rows = query?.ticker
|
||||||
|
? await db
|
||||||
|
.select()
|
||||||
|
.from(filing)
|
||||||
|
.where(eq(filing.ticker, query.ticker))
|
||||||
|
.orderBy(desc(filing.filing_date), desc(filing.updated_at))
|
||||||
|
.limit(safeLimit)
|
||||||
|
: await db
|
||||||
|
.select()
|
||||||
|
.from(filing)
|
||||||
|
.orderBy(desc(filing.filing_date), desc(filing.updated_at))
|
||||||
|
.limit(safeLimit);
|
||||||
|
|
||||||
|
return rows.map(toFiling);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getFilingByAccession(accessionNumber: string) {
|
||||||
|
const [row] = await db
|
||||||
|
.select()
|
||||||
|
.from(filing)
|
||||||
|
.where(eq(filing.accession_number, accessionNumber))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
return row ? toFiling(row) : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function upsertFilingsRecords(items: UpsertFilingInput[]) {
|
||||||
|
let inserted = 0;
|
||||||
|
let updated = 0;
|
||||||
|
|
||||||
|
for (const item of items) {
|
||||||
|
const now = new Date().toISOString();
|
||||||
|
|
||||||
|
const existing = await getFilingByAccession(item.accession_number);
|
||||||
|
|
||||||
|
const [saved] = await db
|
||||||
|
.insert(filing)
|
||||||
|
.values({
|
||||||
|
ticker: item.ticker,
|
||||||
|
filing_type: item.filing_type,
|
||||||
|
filing_date: item.filing_date,
|
||||||
|
accession_number: item.accession_number,
|
||||||
|
cik: item.cik,
|
||||||
|
company_name: item.company_name,
|
||||||
|
filing_url: item.filing_url,
|
||||||
|
submission_url: item.submission_url,
|
||||||
|
primary_document: item.primary_document,
|
||||||
|
metrics: item.metrics,
|
||||||
|
analysis: existing?.analysis ?? null,
|
||||||
|
created_at: existing?.created_at ?? now,
|
||||||
|
updated_at: now
|
||||||
|
})
|
||||||
|
.onConflictDoUpdate({
|
||||||
|
target: filing.accession_number,
|
||||||
|
set: {
|
||||||
|
ticker: item.ticker,
|
||||||
|
filing_type: item.filing_type,
|
||||||
|
filing_date: item.filing_date,
|
||||||
|
cik: item.cik,
|
||||||
|
company_name: item.company_name,
|
||||||
|
filing_url: item.filing_url,
|
||||||
|
submission_url: item.submission_url,
|
||||||
|
primary_document: item.primary_document,
|
||||||
|
metrics: item.metrics,
|
||||||
|
updated_at: now
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.returning({ id: filing.id });
|
||||||
|
|
||||||
|
const links = dedupeLinks(item.links);
|
||||||
|
|
||||||
|
for (const link of links) {
|
||||||
|
await db
|
||||||
|
.insert(filingLink)
|
||||||
|
.values({
|
||||||
|
filing_id: saved.id,
|
||||||
|
link_type: link.link_type,
|
||||||
|
url: link.url,
|
||||||
|
source: 'sec',
|
||||||
|
created_at: now
|
||||||
|
})
|
||||||
|
.onConflictDoNothing();
|
||||||
|
}
|
||||||
|
|
||||||
|
if (existing) {
|
||||||
|
updated += 1;
|
||||||
|
} else {
|
||||||
|
inserted += 1;
|
||||||
|
}
|
||||||
|
}
|
||||||
|
|
||||||
|
return { inserted, updated };
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function saveFilingAnalysis(
|
||||||
|
accessionNumber: string,
|
||||||
|
analysis: Filing['analysis']
|
||||||
|
) {
|
||||||
|
const [updated] = await db
|
||||||
|
.update(filing)
|
||||||
|
.set({
|
||||||
|
analysis,
|
||||||
|
updated_at: new Date().toISOString()
|
||||||
|
})
|
||||||
|
.where(eq(filing.accession_number, accessionNumber))
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
return updated ? toFiling(updated) : null;
|
||||||
|
}
|
||||||
260
lib/server/repos/holdings.ts
Normal file
260
lib/server/repos/holdings.ts
Normal file
@@ -0,0 +1,260 @@
|
|||||||
|
import { and, eq } from 'drizzle-orm';
|
||||||
|
import type { Holding } from '@/lib/types';
|
||||||
|
import { recalculateHolding } from '@/lib/server/portfolio';
|
||||||
|
import { db } from '@/lib/server/db';
|
||||||
|
import { holding } from '@/lib/server/db/schema';
|
||||||
|
|
||||||
|
type HoldingRow = typeof holding.$inferSelect;
|
||||||
|
|
||||||
|
function toHolding(row: HoldingRow): Holding {
|
||||||
|
return {
|
||||||
|
id: row.id,
|
||||||
|
user_id: row.user_id,
|
||||||
|
ticker: row.ticker,
|
||||||
|
shares: row.shares,
|
||||||
|
avg_cost: row.avg_cost,
|
||||||
|
current_price: row.current_price,
|
||||||
|
market_value: row.market_value,
|
||||||
|
gain_loss: row.gain_loss,
|
||||||
|
gain_loss_pct: row.gain_loss_pct,
|
||||||
|
last_price_at: row.last_price_at,
|
||||||
|
created_at: row.created_at,
|
||||||
|
updated_at: row.updated_at
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function sortByMarketValueDesc(rows: Holding[]) {
|
||||||
|
return rows.slice().sort((a, b) => Number(b.market_value) - Number(a.market_value));
|
||||||
|
}
|
||||||
|
|
||||||
|
function normalizeHoldingInput(input: { ticker: string; shares: number; avgCost: number; currentPrice: number }) {
|
||||||
|
return {
|
||||||
|
ticker: input.ticker.trim().toUpperCase(),
|
||||||
|
shares: input.shares.toFixed(6),
|
||||||
|
avg_cost: input.avgCost.toFixed(6),
|
||||||
|
current_price: input.currentPrice.toFixed(6)
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function listUserHoldings(userId: string) {
|
||||||
|
const rows = await db
|
||||||
|
.select()
|
||||||
|
.from(holding)
|
||||||
|
.where(eq(holding.user_id, userId));
|
||||||
|
|
||||||
|
return sortByMarketValueDesc(rows.map(toHolding));
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function upsertHoldingRecord(input: {
|
||||||
|
userId: string;
|
||||||
|
ticker: string;
|
||||||
|
shares: number;
|
||||||
|
avgCost: number;
|
||||||
|
currentPrice?: number;
|
||||||
|
}) {
|
||||||
|
const ticker = input.ticker.trim().toUpperCase();
|
||||||
|
const now = new Date().toISOString();
|
||||||
|
|
||||||
|
const [existing] = await db
|
||||||
|
.select()
|
||||||
|
.from(holding)
|
||||||
|
.where(and(eq(holding.user_id, input.userId), eq(holding.ticker, ticker)))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
const currentPrice = Number.isFinite(input.currentPrice)
|
||||||
|
? Number(input.currentPrice)
|
||||||
|
: input.avgCost;
|
||||||
|
|
||||||
|
if (existing) {
|
||||||
|
const normalized = normalizeHoldingInput({
|
||||||
|
ticker,
|
||||||
|
shares: input.shares,
|
||||||
|
avgCost: input.avgCost,
|
||||||
|
currentPrice
|
||||||
|
});
|
||||||
|
|
||||||
|
const next = recalculateHolding({
|
||||||
|
...toHolding(existing),
|
||||||
|
...normalized,
|
||||||
|
updated_at: now,
|
||||||
|
last_price_at: now
|
||||||
|
});
|
||||||
|
|
||||||
|
const [updated] = await db
|
||||||
|
.update(holding)
|
||||||
|
.set({
|
||||||
|
ticker: next.ticker,
|
||||||
|
shares: next.shares,
|
||||||
|
avg_cost: next.avg_cost,
|
||||||
|
current_price: next.current_price,
|
||||||
|
market_value: next.market_value,
|
||||||
|
gain_loss: next.gain_loss,
|
||||||
|
gain_loss_pct: next.gain_loss_pct,
|
||||||
|
updated_at: next.updated_at,
|
||||||
|
last_price_at: next.last_price_at
|
||||||
|
})
|
||||||
|
.where(eq(holding.id, existing.id))
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
return toHolding(updated);
|
||||||
|
}
|
||||||
|
|
||||||
|
const normalized = normalizeHoldingInput({
|
||||||
|
ticker,
|
||||||
|
shares: input.shares,
|
||||||
|
avgCost: input.avgCost,
|
||||||
|
currentPrice
|
||||||
|
});
|
||||||
|
|
||||||
|
const createdBase: Holding = {
|
||||||
|
id: 0,
|
||||||
|
user_id: input.userId,
|
||||||
|
ticker: normalized.ticker,
|
||||||
|
shares: normalized.shares,
|
||||||
|
avg_cost: normalized.avg_cost,
|
||||||
|
current_price: normalized.current_price,
|
||||||
|
market_value: '0',
|
||||||
|
gain_loss: '0',
|
||||||
|
gain_loss_pct: '0',
|
||||||
|
last_price_at: now,
|
||||||
|
created_at: now,
|
||||||
|
updated_at: now
|
||||||
|
};
|
||||||
|
|
||||||
|
const created = recalculateHolding(createdBase);
|
||||||
|
|
||||||
|
const [inserted] = await db
|
||||||
|
.insert(holding)
|
||||||
|
.values({
|
||||||
|
user_id: created.user_id,
|
||||||
|
ticker: created.ticker,
|
||||||
|
shares: created.shares,
|
||||||
|
avg_cost: created.avg_cost,
|
||||||
|
current_price: created.current_price,
|
||||||
|
market_value: created.market_value,
|
||||||
|
gain_loss: created.gain_loss,
|
||||||
|
gain_loss_pct: created.gain_loss_pct,
|
||||||
|
last_price_at: created.last_price_at,
|
||||||
|
created_at: created.created_at,
|
||||||
|
updated_at: created.updated_at
|
||||||
|
})
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
return toHolding(inserted);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function updateHoldingByIdRecord(input: {
|
||||||
|
userId: string;
|
||||||
|
id: number;
|
||||||
|
shares?: number;
|
||||||
|
avgCost?: number;
|
||||||
|
currentPrice?: number;
|
||||||
|
}) {
|
||||||
|
const [existing] = await db
|
||||||
|
.select()
|
||||||
|
.from(holding)
|
||||||
|
.where(and(eq(holding.id, input.id), eq(holding.user_id, input.userId)))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
if (!existing) {
|
||||||
|
return null;
|
||||||
|
}
|
||||||
|
|
||||||
|
const current = toHolding(existing);
|
||||||
|
const shares = Number.isFinite(input.shares)
|
||||||
|
? Number(input.shares)
|
||||||
|
: Number(current.shares);
|
||||||
|
const avgCost = Number.isFinite(input.avgCost)
|
||||||
|
? Number(input.avgCost)
|
||||||
|
: Number(current.avg_cost);
|
||||||
|
const currentPrice = Number.isFinite(input.currentPrice)
|
||||||
|
? Number(input.currentPrice)
|
||||||
|
: Number(current.current_price ?? current.avg_cost);
|
||||||
|
|
||||||
|
const next = recalculateHolding({
|
||||||
|
...current,
|
||||||
|
shares: shares.toFixed(6),
|
||||||
|
avg_cost: avgCost.toFixed(6),
|
||||||
|
current_price: currentPrice.toFixed(6),
|
||||||
|
updated_at: new Date().toISOString(),
|
||||||
|
last_price_at: new Date().toISOString()
|
||||||
|
});
|
||||||
|
|
||||||
|
const [updated] = await db
|
||||||
|
.update(holding)
|
||||||
|
.set({
|
||||||
|
shares: next.shares,
|
||||||
|
avg_cost: next.avg_cost,
|
||||||
|
current_price: next.current_price,
|
||||||
|
market_value: next.market_value,
|
||||||
|
gain_loss: next.gain_loss,
|
||||||
|
gain_loss_pct: next.gain_loss_pct,
|
||||||
|
updated_at: next.updated_at,
|
||||||
|
last_price_at: next.last_price_at
|
||||||
|
})
|
||||||
|
.where(eq(holding.id, existing.id))
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
return toHolding(updated);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function deleteHoldingByIdRecord(userId: string, id: number) {
|
||||||
|
const rows = await db
|
||||||
|
.delete(holding)
|
||||||
|
.where(and(eq(holding.user_id, userId), eq(holding.id, id)))
|
||||||
|
.returning({ id: holding.id });
|
||||||
|
|
||||||
|
return rows.length > 0;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function listHoldingsForPriceRefresh(userId: string) {
|
||||||
|
const rows = await db
|
||||||
|
.select()
|
||||||
|
.from(holding)
|
||||||
|
.where(eq(holding.user_id, userId));
|
||||||
|
|
||||||
|
return rows.map(toHolding);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function applyRefreshedPrices(
|
||||||
|
userId: string,
|
||||||
|
quotes: Map<string, number>,
|
||||||
|
updateTime: string
|
||||||
|
) {
|
||||||
|
const rows = await db
|
||||||
|
.select()
|
||||||
|
.from(holding)
|
||||||
|
.where(eq(holding.user_id, userId));
|
||||||
|
|
||||||
|
let updatedCount = 0;
|
||||||
|
|
||||||
|
for (const row of rows) {
|
||||||
|
const quote = quotes.get(row.ticker);
|
||||||
|
if (quote === undefined) {
|
||||||
|
continue;
|
||||||
|
}
|
||||||
|
|
||||||
|
const next = recalculateHolding({
|
||||||
|
...toHolding(row),
|
||||||
|
current_price: quote.toFixed(6),
|
||||||
|
last_price_at: updateTime,
|
||||||
|
updated_at: updateTime
|
||||||
|
});
|
||||||
|
|
||||||
|
await db
|
||||||
|
.update(holding)
|
||||||
|
.set({
|
||||||
|
current_price: next.current_price,
|
||||||
|
market_value: next.market_value,
|
||||||
|
gain_loss: next.gain_loss,
|
||||||
|
gain_loss_pct: next.gain_loss_pct,
|
||||||
|
last_price_at: next.last_price_at,
|
||||||
|
updated_at: next.updated_at
|
||||||
|
})
|
||||||
|
.where(eq(holding.id, row.id));
|
||||||
|
|
||||||
|
updatedCount += 1;
|
||||||
|
}
|
||||||
|
|
||||||
|
return updatedCount;
|
||||||
|
}
|
||||||
48
lib/server/repos/insights.ts
Normal file
48
lib/server/repos/insights.ts
Normal file
@@ -0,0 +1,48 @@
|
|||||||
|
import { desc, eq } from 'drizzle-orm';
|
||||||
|
import type { PortfolioInsight } from '@/lib/types';
|
||||||
|
import { db } from '@/lib/server/db';
|
||||||
|
import { portfolioInsight } from '@/lib/server/db/schema';
|
||||||
|
|
||||||
|
type InsightRow = typeof portfolioInsight.$inferSelect;
|
||||||
|
|
||||||
|
function toInsight(row: InsightRow): PortfolioInsight {
|
||||||
|
return {
|
||||||
|
id: row.id,
|
||||||
|
user_id: row.user_id,
|
||||||
|
provider: row.provider,
|
||||||
|
model: row.model,
|
||||||
|
content: row.content,
|
||||||
|
created_at: row.created_at
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createPortfolioInsight(input: {
|
||||||
|
userId: string;
|
||||||
|
provider: string;
|
||||||
|
model: string;
|
||||||
|
content: string;
|
||||||
|
}) {
|
||||||
|
const [created] = await db
|
||||||
|
.insert(portfolioInsight)
|
||||||
|
.values({
|
||||||
|
user_id: input.userId,
|
||||||
|
provider: input.provider,
|
||||||
|
model: input.model,
|
||||||
|
content: input.content,
|
||||||
|
created_at: new Date().toISOString()
|
||||||
|
})
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
return toInsight(created);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getLatestPortfolioInsight(userId: string) {
|
||||||
|
const [row] = await db
|
||||||
|
.select()
|
||||||
|
.from(portfolioInsight)
|
||||||
|
.where(eq(portfolioInsight.user_id, userId))
|
||||||
|
.orderBy(desc(portfolioInsight.created_at))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
return row ? toInsight(row) : null;
|
||||||
|
}
|
||||||
195
lib/server/repos/tasks.ts
Normal file
195
lib/server/repos/tasks.ts
Normal file
@@ -0,0 +1,195 @@
|
|||||||
|
import { and, desc, eq, inArray, sql } from 'drizzle-orm';
|
||||||
|
import type { Task, TaskStatus, TaskType } from '@/lib/types';
|
||||||
|
import { db } from '@/lib/server/db';
|
||||||
|
import { taskRun } from '@/lib/server/db/schema';
|
||||||
|
|
||||||
|
type TaskRow = typeof taskRun.$inferSelect;
|
||||||
|
|
||||||
|
type CreateTaskInput = {
|
||||||
|
id: string;
|
||||||
|
user_id: string;
|
||||||
|
task_type: TaskType;
|
||||||
|
payload: Record<string, unknown>;
|
||||||
|
priority: number;
|
||||||
|
max_attempts: number;
|
||||||
|
};
|
||||||
|
|
||||||
|
function toTask(row: TaskRow): Task {
|
||||||
|
return {
|
||||||
|
id: row.id,
|
||||||
|
user_id: row.user_id,
|
||||||
|
task_type: row.task_type,
|
||||||
|
status: row.status,
|
||||||
|
priority: row.priority,
|
||||||
|
payload: row.payload,
|
||||||
|
result: row.result,
|
||||||
|
error: row.error,
|
||||||
|
attempts: row.attempts,
|
||||||
|
max_attempts: row.max_attempts,
|
||||||
|
workflow_run_id: row.workflow_run_id,
|
||||||
|
created_at: row.created_at,
|
||||||
|
updated_at: row.updated_at,
|
||||||
|
finished_at: row.finished_at
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function createTaskRunRecord(input: CreateTaskInput) {
|
||||||
|
const now = new Date().toISOString();
|
||||||
|
|
||||||
|
const [row] = await db
|
||||||
|
.insert(taskRun)
|
||||||
|
.values({
|
||||||
|
id: input.id,
|
||||||
|
user_id: input.user_id,
|
||||||
|
task_type: input.task_type,
|
||||||
|
status: 'queued',
|
||||||
|
priority: input.priority,
|
||||||
|
payload: input.payload,
|
||||||
|
result: null,
|
||||||
|
error: null,
|
||||||
|
attempts: 0,
|
||||||
|
max_attempts: input.max_attempts,
|
||||||
|
workflow_run_id: null,
|
||||||
|
created_at: now,
|
||||||
|
updated_at: now,
|
||||||
|
finished_at: null
|
||||||
|
})
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
return toTask(row);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function setTaskWorkflowRunId(taskId: string, workflowRunId: string) {
|
||||||
|
await db
|
||||||
|
.update(taskRun)
|
||||||
|
.set({
|
||||||
|
workflow_run_id: workflowRunId,
|
||||||
|
updated_at: new Date().toISOString()
|
||||||
|
})
|
||||||
|
.where(eq(taskRun.id, taskId));
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function getTaskByIdForUser(taskId: string, userId: string) {
|
||||||
|
const [row] = await db
|
||||||
|
.select()
|
||||||
|
.from(taskRun)
|
||||||
|
.where(and(eq(taskRun.id, taskId), eq(taskRun.user_id, userId)))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
return row ? toTask(row) : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function listRecentTasksForUser(
|
||||||
|
userId: string,
|
||||||
|
limit = 20,
|
||||||
|
statuses?: TaskStatus[]
|
||||||
|
) {
|
||||||
|
const safeLimit = Math.min(Math.max(Math.trunc(limit), 1), 200);
|
||||||
|
|
||||||
|
const rows = statuses && statuses.length > 0
|
||||||
|
? await db
|
||||||
|
.select()
|
||||||
|
.from(taskRun)
|
||||||
|
.where(and(eq(taskRun.user_id, userId), inArray(taskRun.status, statuses)))
|
||||||
|
.orderBy(desc(taskRun.created_at))
|
||||||
|
.limit(safeLimit)
|
||||||
|
: await db
|
||||||
|
.select()
|
||||||
|
.from(taskRun)
|
||||||
|
.where(eq(taskRun.user_id, userId))
|
||||||
|
.orderBy(desc(taskRun.created_at))
|
||||||
|
.limit(safeLimit);
|
||||||
|
|
||||||
|
return rows.map(toTask);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function countTasksByStatus() {
|
||||||
|
const rows = await db
|
||||||
|
.select({
|
||||||
|
status: taskRun.status,
|
||||||
|
count: sql<string>`count(*)`
|
||||||
|
})
|
||||||
|
.from(taskRun)
|
||||||
|
.groupBy(taskRun.status);
|
||||||
|
|
||||||
|
const queue: Record<string, number> = {};
|
||||||
|
|
||||||
|
for (const row of rows) {
|
||||||
|
queue[row.status] = Number(row.count);
|
||||||
|
}
|
||||||
|
|
||||||
|
return queue;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function claimQueuedTask(taskId: string) {
|
||||||
|
const [row] = await db
|
||||||
|
.update(taskRun)
|
||||||
|
.set({
|
||||||
|
status: 'running',
|
||||||
|
attempts: sql`${taskRun.attempts} + 1`,
|
||||||
|
updated_at: new Date().toISOString()
|
||||||
|
})
|
||||||
|
.where(and(eq(taskRun.id, taskId), eq(taskRun.status, 'queued')))
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
return row ? toTask(row) : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function completeTask(taskId: string, result: Record<string, unknown>) {
|
||||||
|
const [row] = await db
|
||||||
|
.update(taskRun)
|
||||||
|
.set({
|
||||||
|
status: 'completed',
|
||||||
|
result,
|
||||||
|
error: null,
|
||||||
|
updated_at: new Date().toISOString(),
|
||||||
|
finished_at: new Date().toISOString()
|
||||||
|
})
|
||||||
|
.where(eq(taskRun.id, taskId))
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
return row ? toTask(row) : null;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Records a task failure: re-queues the task when attempts remain, otherwise
 * marks it permanently failed.
 *
 * NOTE(review): this is a read-then-write without a status guard in the
 * UPDATE's WHERE clause, so a concurrent claim/complete between the SELECT
 * and the UPDATE could be clobbered — confirm whether callers serialize
 * access per task id before relying on this under concurrency.
 *
 * @returns the updated task (null when the id is unknown) plus whether the
 *          caller should schedule a retry.
 */
export async function markTaskFailure(taskId: string, reason: string) {
  // Load the current row to inspect its attempt counters.
  const [current] = await db
    .select()
    .from(taskRun)
    .where(eq(taskRun.id, taskId))
    .limit(1);

  if (!current) {
    return {
      task: null,
      shouldRetry: false
    };
  }

  // Retry budget: attempts was already incremented by claimQueuedTask.
  const shouldRetry = current.attempts < current.max_attempts;

  const [updated] = await db
    .update(taskRun)
    .set({
      status: shouldRetry ? 'queued' : 'failed',
      error: reason,
      updated_at: new Date().toISOString(),
      // finished_at stays null while the task is eligible for another attempt.
      finished_at: shouldRetry ? null : new Date().toISOString()
    })
    .where(eq(taskRun.id, taskId))
    .returning();

  return {
    task: updated ? toTask(updated) : null,
    shouldRetry
  };
}
|
||||||
|
|
||||||
|
export async function getTaskById(taskId: string) {
|
||||||
|
const [row] = await db
|
||||||
|
.select()
|
||||||
|
.from(taskRun)
|
||||||
|
.where(eq(taskRun.id, taskId))
|
||||||
|
.limit(1);
|
||||||
|
|
||||||
|
return row ? toTask(row) : null;
|
||||||
|
}
|
||||||
63
lib/server/repos/watchlist.ts
Normal file
63
lib/server/repos/watchlist.ts
Normal file
@@ -0,0 +1,63 @@
|
|||||||
|
import { and, desc, eq } from 'drizzle-orm';
|
||||||
|
import type { WatchlistItem } from '@/lib/types';
|
||||||
|
import { db } from '@/lib/server/db';
|
||||||
|
import { watchlistItem } from '@/lib/server/db/schema';
|
||||||
|
|
||||||
|
type WatchlistRow = typeof watchlistItem.$inferSelect;
|
||||||
|
|
||||||
|
function toWatchlistItem(row: WatchlistRow): WatchlistItem {
|
||||||
|
return {
|
||||||
|
id: row.id,
|
||||||
|
user_id: row.user_id,
|
||||||
|
ticker: row.ticker,
|
||||||
|
company_name: row.company_name,
|
||||||
|
sector: row.sector,
|
||||||
|
created_at: row.created_at
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function listWatchlistItems(userId: string) {
|
||||||
|
const rows = await db
|
||||||
|
.select()
|
||||||
|
.from(watchlistItem)
|
||||||
|
.where(eq(watchlistItem.user_id, userId))
|
||||||
|
.orderBy(desc(watchlistItem.created_at));
|
||||||
|
|
||||||
|
return rows.map(toWatchlistItem);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function upsertWatchlistItemRecord(input: {
|
||||||
|
userId: string;
|
||||||
|
ticker: string;
|
||||||
|
companyName: string;
|
||||||
|
sector?: string;
|
||||||
|
}) {
|
||||||
|
const [row] = await db
|
||||||
|
.insert(watchlistItem)
|
||||||
|
.values({
|
||||||
|
user_id: input.userId,
|
||||||
|
ticker: input.ticker,
|
||||||
|
company_name: input.companyName,
|
||||||
|
sector: input.sector?.trim() ? input.sector.trim() : null,
|
||||||
|
created_at: new Date().toISOString()
|
||||||
|
})
|
||||||
|
.onConflictDoUpdate({
|
||||||
|
target: [watchlistItem.user_id, watchlistItem.ticker],
|
||||||
|
set: {
|
||||||
|
company_name: input.companyName,
|
||||||
|
sector: input.sector?.trim() ? input.sector.trim() : null
|
||||||
|
}
|
||||||
|
})
|
||||||
|
.returning();
|
||||||
|
|
||||||
|
return toWatchlistItem(row);
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function deleteWatchlistItemRecord(userId: string, id: number) {
|
||||||
|
const removed = await db
|
||||||
|
.delete(watchlistItem)
|
||||||
|
.where(and(eq(watchlistItem.user_id, userId), eq(watchlistItem.id, id)))
|
||||||
|
.returning({ id: watchlistItem.id });
|
||||||
|
|
||||||
|
return removed.length > 0;
|
||||||
|
}
|
||||||
@@ -35,6 +35,8 @@ type SecFiling = {
|
|||||||
filingDate: string;
|
filingDate: string;
|
||||||
accessionNumber: string;
|
accessionNumber: string;
|
||||||
filingUrl: string | null;
|
filingUrl: string | null;
|
||||||
|
submissionUrl: string | null;
|
||||||
|
primaryDocument: string | null;
|
||||||
};
|
};
|
||||||
|
|
||||||
const SUPPORTED_FORMS: FilingType[] = ['10-K', '10-Q', '8-K'];
|
const SUPPORTED_FORMS: FilingType[] = ['10-K', '10-Q', '8-K'];
|
||||||
@@ -71,15 +73,17 @@ function fallbackFilings(ticker: string, limit: number): SecFiling[] {
|
|||||||
const date = new Date(Date.now() - i * 1000 * 60 * 60 * 24 * 35).toISOString().slice(0, 10);
|
const date = new Date(Date.now() - i * 1000 * 60 * 60 * 24 * 35).toISOString().slice(0, 10);
|
||||||
const accessionNumber = `${Date.now()}-${i}`;
|
const accessionNumber = `${Date.now()}-${i}`;
|
||||||
|
|
||||||
filings.push({
|
filings.push({
|
||||||
ticker: normalized,
|
ticker: normalized,
|
||||||
cik: String(100000 + i),
|
cik: String(100000 + i),
|
||||||
companyName,
|
companyName,
|
||||||
filingType,
|
filingType,
|
||||||
filingDate: date,
|
filingDate: date,
|
||||||
accessionNumber,
|
accessionNumber,
|
||||||
filingUrl: null
|
filingUrl: null,
|
||||||
});
|
submissionUrl: null,
|
||||||
|
primaryDocument: null
|
||||||
|
});
|
||||||
}
|
}
|
||||||
|
|
||||||
return filings;
|
return filings;
|
||||||
@@ -174,6 +178,7 @@ export async function fetchRecentFilings(ticker: string, limit = 20): Promise<Se
|
|||||||
const cikPadded = company.cik.padStart(10, '0');
|
const cikPadded = company.cik.padStart(10, '0');
|
||||||
const payload = await fetchJson<RecentFilingsPayload>(`https://data.sec.gov/submissions/CIK${cikPadded}.json`);
|
const payload = await fetchJson<RecentFilingsPayload>(`https://data.sec.gov/submissions/CIK${cikPadded}.json`);
|
||||||
const recent = payload.filings?.recent;
|
const recent = payload.filings?.recent;
|
||||||
|
const submissionUrl = `https://data.sec.gov/submissions/CIK${cikPadded}.json`;
|
||||||
|
|
||||||
if (!recent) {
|
if (!recent) {
|
||||||
return fallbackFilings(company.ticker, safeLimit);
|
return fallbackFilings(company.ticker, safeLimit);
|
||||||
@@ -210,7 +215,9 @@ export async function fetchRecentFilings(ticker: string, limit = 20): Promise<Se
|
|||||||
filingType,
|
filingType,
|
||||||
filingDate: filingDates[i] ?? todayIso(),
|
filingDate: filingDates[i] ?? todayIso(),
|
||||||
accessionNumber,
|
accessionNumber,
|
||||||
filingUrl
|
filingUrl,
|
||||||
|
submissionUrl,
|
||||||
|
primaryDocument: documentName ?? null
|
||||||
});
|
});
|
||||||
|
|
||||||
if (filings.length >= safeLimit) {
|
if (filings.length >= safeLimit) {
|
||||||
|
|||||||
@@ -1,84 +0,0 @@
|
|||||||
import { mkdir, readFile, rename, writeFile } from 'node:fs/promises';
|
|
||||||
import path from 'node:path';
|
|
||||||
import type { Filing, Holding, PortfolioInsight, Task, WatchlistItem } from '@/lib/types';
|
|
||||||
|
|
||||||
export type DataStore = {
|
|
||||||
counters: {
|
|
||||||
watchlist: number;
|
|
||||||
holdings: number;
|
|
||||||
filings: number;
|
|
||||||
insights: number;
|
|
||||||
};
|
|
||||||
watchlist: WatchlistItem[];
|
|
||||||
holdings: Holding[];
|
|
||||||
filings: Filing[];
|
|
||||||
tasks: Task[];
|
|
||||||
insights: PortfolioInsight[];
|
|
||||||
};
|
|
||||||
|
|
||||||
const DATA_DIR = path.join(process.cwd(), 'data');
|
|
||||||
const STORE_PATH = path.join(DATA_DIR, 'store.json');
|
|
||||||
|
|
||||||
let writeQueue = Promise.resolve();
|
|
||||||
|
|
||||||
function createDefaultStore(): DataStore {
|
|
||||||
return {
|
|
||||||
counters: {
|
|
||||||
watchlist: 0,
|
|
||||||
holdings: 0,
|
|
||||||
filings: 0,
|
|
||||||
insights: 0
|
|
||||||
},
|
|
||||||
watchlist: [],
|
|
||||||
holdings: [],
|
|
||||||
filings: [],
|
|
||||||
tasks: [],
|
|
||||||
insights: []
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async function ensureStoreFile() {
|
|
||||||
await mkdir(DATA_DIR, { recursive: true });
|
|
||||||
|
|
||||||
try {
|
|
||||||
await readFile(STORE_PATH, 'utf8');
|
|
||||||
} catch {
|
|
||||||
const defaultStore = createDefaultStore();
|
|
||||||
defaultStore.counters.insights = defaultStore.insights.length;
|
|
||||||
await writeFile(STORE_PATH, JSON.stringify(defaultStore, null, 2), 'utf8');
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function readStore(): Promise<DataStore> {
|
|
||||||
await ensureStoreFile();
|
|
||||||
const raw = await readFile(STORE_PATH, 'utf8');
|
|
||||||
return JSON.parse(raw) as DataStore;
|
|
||||||
}
|
|
||||||
|
|
||||||
async function writeStore(store: DataStore) {
|
|
||||||
const tempPath = `${STORE_PATH}.tmp`;
|
|
||||||
await writeFile(tempPath, JSON.stringify(store, null, 2), 'utf8');
|
|
||||||
await rename(tempPath, STORE_PATH);
|
|
||||||
}
|
|
||||||
|
|
||||||
function cloneStore(store: DataStore): DataStore {
|
|
||||||
return JSON.parse(JSON.stringify(store)) as DataStore;
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function getStoreSnapshot() {
|
|
||||||
const store = await readStore();
|
|
||||||
return cloneStore(store);
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function withStore<T>(mutator: (store: DataStore) => T | Promise<T>): Promise<T> {
|
|
||||||
const run = async () => {
|
|
||||||
const store = await readStore();
|
|
||||||
const result = await mutator(store);
|
|
||||||
await writeStore(store);
|
|
||||||
return result;
|
|
||||||
};
|
|
||||||
|
|
||||||
const nextRun = writeQueue.then(run, run);
|
|
||||||
writeQueue = nextRun.then(() => undefined, () => undefined);
|
|
||||||
return await nextRun;
|
|
||||||
}
|
|
||||||
218
lib/server/task-processors.ts
Normal file
218
lib/server/task-processors.ts
Normal file
@@ -0,0 +1,218 @@
|
|||||||
|
import type { Filing, Holding, Task } from '@/lib/types';
|
||||||
|
import { runOpenClawAnalysis } from '@/lib/server/openclaw';
|
||||||
|
import { buildPortfolioSummary } from '@/lib/server/portfolio';
|
||||||
|
import { getQuote } from '@/lib/server/prices';
|
||||||
|
import {
|
||||||
|
getFilingByAccession,
|
||||||
|
saveFilingAnalysis,
|
||||||
|
upsertFilingsRecords
|
||||||
|
} from '@/lib/server/repos/filings';
|
||||||
|
import {
|
||||||
|
applyRefreshedPrices,
|
||||||
|
listHoldingsForPriceRefresh,
|
||||||
|
listUserHoldings
|
||||||
|
} from '@/lib/server/repos/holdings';
|
||||||
|
import { createPortfolioInsight } from '@/lib/server/repos/insights';
|
||||||
|
import { fetchFilingMetrics, fetchRecentFilings } from '@/lib/server/sec';
|
||||||
|
|
||||||
|
function toTaskResult(value: unknown): Record<string, unknown> {
|
||||||
|
if (!value || typeof value !== 'object' || Array.isArray(value)) {
|
||||||
|
return { value };
|
||||||
|
}
|
||||||
|
|
||||||
|
return value as Record<string, unknown>;
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseTicker(raw: unknown) {
|
||||||
|
if (typeof raw !== 'string' || raw.trim().length < 1) {
|
||||||
|
throw new Error('Ticker is required');
|
||||||
|
}
|
||||||
|
|
||||||
|
return raw.trim().toUpperCase();
|
||||||
|
}
|
||||||
|
|
||||||
|
function parseLimit(raw: unknown, fallback: number, min: number, max: number) {
|
||||||
|
const numberValue = typeof raw === 'number' ? raw : Number(raw);
|
||||||
|
|
||||||
|
if (!Number.isFinite(numberValue)) {
|
||||||
|
return fallback;
|
||||||
|
}
|
||||||
|
|
||||||
|
const intValue = Math.trunc(numberValue);
|
||||||
|
return Math.min(Math.max(intValue, min), max);
|
||||||
|
}
|
||||||
|
|
||||||
|
function filingLinks(filing: {
|
||||||
|
filingUrl: string | null;
|
||||||
|
submissionUrl: string | null;
|
||||||
|
}) {
|
||||||
|
const links: Array<{ link_type: string; url: string }> = [];
|
||||||
|
|
||||||
|
if (filing.filingUrl) {
|
||||||
|
links.push({ link_type: 'primary_document', url: filing.filingUrl });
|
||||||
|
}
|
||||||
|
|
||||||
|
if (filing.submissionUrl) {
|
||||||
|
links.push({ link_type: 'submission_index', url: filing.submissionUrl });
|
||||||
|
}
|
||||||
|
|
||||||
|
return links;
|
||||||
|
}
|
||||||
|
|
||||||
|
/**
 * Fetches recent SEC filings for the task's ticker, enriches each unique CIK
 * with company metrics, and upserts everything into the filings table.
 *
 * @returns counts of fetched/inserted/updated rows for the task result.
 */
async function processSyncFilings(task: Task) {
  const ticker = parseTicker(task.payload.ticker);
  // Clamp the requested page size into the 1..50 window (default 20).
  const limit = parseLimit(task.payload.limit, 20, 1, 50);
  const filings = await fetchRecentFilings(ticker, limit);
  const metricsByCik = new Map<string, Filing['metrics']>();

  // Fetch metrics once per unique CIK. Requests are kept sequential —
  // presumably to respect SEC API rate-limit etiquette (see SEC_USER_AGENT
  // config); confirm before parallelizing with Promise.all.
  for (const filing of filings) {
    if (!metricsByCik.has(filing.cik)) {
      const metrics = await fetchFilingMetrics(filing.cik, filing.ticker);
      metricsByCik.set(filing.cik, metrics);
    }
  }

  // Map the SEC camelCase shape onto the snake_case repo schema and upsert.
  const saveResult = await upsertFilingsRecords(
    filings.map((filing) => ({
      ticker: filing.ticker,
      filing_type: filing.filingType,
      filing_date: filing.filingDate,
      accession_number: filing.accessionNumber,
      cik: filing.cik,
      company_name: filing.companyName,
      filing_url: filing.filingUrl,
      submission_url: filing.submissionUrl,
      primary_document: filing.primaryDocument,
      metrics: metricsByCik.get(filing.cik) ?? null,
      links: filingLinks(filing)
    }))
  );

  return {
    ticker,
    fetched: filings.length,
    inserted: saveResult.inserted,
    updated: saveResult.updated
  };
}
|
||||||
|
|
||||||
|
/**
 * Refreshes current prices for every distinct ticker in the task owner's
 * holdings, then writes the quotes back in one repo call.
 *
 * @returns how many holdings were updated and how many tickers were quoted.
 * @throws Error when the task has no user_id (price refresh is user-scoped).
 */
async function processRefreshPrices(task: Task) {
  const userId = task.user_id;
  if (!userId) {
    throw new Error('Task is missing user scope');
  }

  const userHoldings = await listHoldingsForPriceRefresh(userId);
  // Dedupe tickers so each symbol is quoted at most once.
  const tickers = [...new Set(userHoldings.map((entry) => entry.ticker))];
  const quotes = new Map<string, number>();

  // Quotes are fetched sequentially — possibly deliberate throttling of the
  // upstream price provider; confirm before parallelizing.
  for (const ticker of tickers) {
    const quote = await getQuote(ticker);
    quotes.set(ticker, quote);
  }

  // One timestamp for the whole refresh batch.
  const updatedCount = await applyRefreshedPrices(userId, quotes, new Date().toISOString());

  return {
    updatedCount,
    totalTickers: tickers.length
  };
}
|
||||||
|
|
||||||
|
async function processAnalyzeFiling(task: Task) {
|
||||||
|
const accessionNumber = typeof task.payload.accessionNumber === 'string'
|
||||||
|
? task.payload.accessionNumber
|
||||||
|
: '';
|
||||||
|
|
||||||
|
if (!accessionNumber) {
|
||||||
|
throw new Error('accessionNumber is required');
|
||||||
|
}
|
||||||
|
|
||||||
|
const filing = await getFilingByAccession(accessionNumber);
|
||||||
|
|
||||||
|
if (!filing) {
|
||||||
|
throw new Error(`Filing ${accessionNumber} not found`);
|
||||||
|
}
|
||||||
|
|
||||||
|
const prompt = [
|
||||||
|
'You are a fiscal research assistant focused on regulatory signals.',
|
||||||
|
`Analyze this SEC filing from ${filing.company_name} (${filing.ticker}).`,
|
||||||
|
`Form: ${filing.filing_type}`,
|
||||||
|
`Filed: ${filing.filing_date}`,
|
||||||
|
`Metrics: ${JSON.stringify(filing.metrics ?? {})}`,
|
||||||
|
'Return concise sections: Thesis, Red Flags, Follow-up Questions, Portfolio Impact.'
|
||||||
|
].join('\n');
|
||||||
|
|
||||||
|
const analysis = await runOpenClawAnalysis(prompt, 'Use concise institutional analyst language.');
|
||||||
|
|
||||||
|
await saveFilingAnalysis(accessionNumber, {
|
||||||
|
provider: analysis.provider,
|
||||||
|
model: analysis.model,
|
||||||
|
text: analysis.text
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
accessionNumber,
|
||||||
|
provider: analysis.provider,
|
||||||
|
model: analysis.model
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
function holdingDigest(holdings: Holding[]) {
|
||||||
|
return holdings.map((holding) => ({
|
||||||
|
ticker: holding.ticker,
|
||||||
|
shares: holding.shares,
|
||||||
|
avgCost: holding.avg_cost,
|
||||||
|
currentPrice: holding.current_price,
|
||||||
|
marketValue: holding.market_value,
|
||||||
|
gainLoss: holding.gain_loss,
|
||||||
|
gainLossPct: holding.gain_loss_pct
|
||||||
|
}));
|
||||||
|
}
|
||||||
|
|
||||||
|
async function processPortfolioInsights(task: Task) {
|
||||||
|
const userId = task.user_id;
|
||||||
|
if (!userId) {
|
||||||
|
throw new Error('Task is missing user scope');
|
||||||
|
}
|
||||||
|
|
||||||
|
const userHoldings = await listUserHoldings(userId);
|
||||||
|
const summary = buildPortfolioSummary(userHoldings);
|
||||||
|
|
||||||
|
const prompt = [
|
||||||
|
'Generate portfolio intelligence with actionable recommendations.',
|
||||||
|
`Portfolio summary: ${JSON.stringify(summary)}`,
|
||||||
|
`Holdings: ${JSON.stringify(holdingDigest(userHoldings))}`,
|
||||||
|
'Respond with: 1) health score (0-100), 2) top 3 risks, 3) top 3 opportunities, 4) next actions in 7 days.'
|
||||||
|
].join('\n');
|
||||||
|
|
||||||
|
const analysis = await runOpenClawAnalysis(prompt, 'Act as a risk-aware buy-side analyst.');
|
||||||
|
|
||||||
|
await createPortfolioInsight({
|
||||||
|
userId,
|
||||||
|
provider: analysis.provider,
|
||||||
|
model: analysis.model,
|
||||||
|
content: analysis.text
|
||||||
|
});
|
||||||
|
|
||||||
|
return {
|
||||||
|
provider: analysis.provider,
|
||||||
|
model: analysis.model,
|
||||||
|
summary
|
||||||
|
};
|
||||||
|
}
|
||||||
|
|
||||||
|
export async function runTaskProcessor(task: Task) {
|
||||||
|
switch (task.task_type) {
|
||||||
|
case 'sync_filings':
|
||||||
|
return toTaskResult(await processSyncFilings(task));
|
||||||
|
case 'refresh_prices':
|
||||||
|
return toTaskResult(await processRefreshPrices(task));
|
||||||
|
case 'analyze_filing':
|
||||||
|
return toTaskResult(await processAnalyzeFiling(task));
|
||||||
|
case 'portfolio_insights':
|
||||||
|
return toTaskResult(await processPortfolioInsights(task));
|
||||||
|
default:
|
||||||
|
throw new Error(`Unsupported task type: ${task.task_type}`);
|
||||||
|
}
|
||||||
|
}
|
||||||
@@ -1,10 +1,15 @@
|
|||||||
import { randomUUID } from 'node:crypto';
|
import { randomUUID } from 'node:crypto';
|
||||||
import type { Filing, Holding, PortfolioInsight, Task, TaskStatus, TaskType } from '@/lib/types';
|
import { start } from 'workflow/api';
|
||||||
import { runOpenClawAnalysis } from '@/lib/server/openclaw';
|
import type { Task, TaskStatus, TaskType } from '@/lib/types';
|
||||||
import { buildPortfolioSummary, recalculateHolding } from '@/lib/server/portfolio';
|
import { runTaskWorkflow } from '@/app/workflows/task-runner';
|
||||||
import { getQuote } from '@/lib/server/prices';
|
import {
|
||||||
import { fetchFilingMetrics, fetchRecentFilings } from '@/lib/server/sec';
|
countTasksByStatus,
|
||||||
import { getStoreSnapshot, withStore } from '@/lib/server/store';
|
createTaskRunRecord,
|
||||||
|
getTaskByIdForUser,
|
||||||
|
listRecentTasksForUser,
|
||||||
|
markTaskFailure,
|
||||||
|
setTaskWorkflowRunId
|
||||||
|
} from '@/lib/server/repos/tasks';
|
||||||
|
|
||||||
type EnqueueTaskInput = {
|
type EnqueueTaskInput = {
|
||||||
userId: string;
|
userId: string;
|
||||||
@@ -14,410 +19,41 @@ type EnqueueTaskInput = {
|
|||||||
maxAttempts?: number;
|
maxAttempts?: number;
|
||||||
};
|
};
|
||||||
|
|
||||||
const activeTaskRuns = new Set<string>();
|
|
||||||
|
|
||||||
function nowIso() {
|
|
||||||
return new Date().toISOString();
|
|
||||||
}
|
|
||||||
|
|
||||||
function toTaskResult(value: unknown): Record<string, unknown> {
|
|
||||||
if (!value || typeof value !== 'object' || Array.isArray(value)) {
|
|
||||||
return { value };
|
|
||||||
}
|
|
||||||
|
|
||||||
return value as Record<string, unknown>;
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseTicker(raw: unknown) {
|
|
||||||
if (typeof raw !== 'string' || raw.trim().length < 1) {
|
|
||||||
throw new Error('Ticker is required');
|
|
||||||
}
|
|
||||||
|
|
||||||
return raw.trim().toUpperCase();
|
|
||||||
}
|
|
||||||
|
|
||||||
function parseLimit(raw: unknown, fallback: number, min: number, max: number) {
|
|
||||||
const numberValue = typeof raw === 'number' ? raw : Number(raw);
|
|
||||||
|
|
||||||
if (!Number.isFinite(numberValue)) {
|
|
||||||
return fallback;
|
|
||||||
}
|
|
||||||
|
|
||||||
const intValue = Math.trunc(numberValue);
|
|
||||||
return Math.min(Math.max(intValue, min), max);
|
|
||||||
}
|
|
||||||
|
|
||||||
function queueTaskRun(taskId: string, delayMs = 40) {
|
|
||||||
setTimeout(() => {
|
|
||||||
void processTask(taskId);
|
|
||||||
}, delayMs);
|
|
||||||
}
|
|
||||||
|
|
||||||
async function markTask(taskId: string, mutator: (task: Task) => void) {
|
|
||||||
await withStore((store) => {
|
|
||||||
const index = store.tasks.findIndex((task) => task.id === taskId);
|
|
||||||
if (index < 0) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
const task = store.tasks[index];
|
|
||||||
mutator(task);
|
|
||||||
task.updated_at = nowIso();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
|
|
||||||
async function processSyncFilings(task: Task) {
|
|
||||||
const ticker = parseTicker(task.payload.ticker);
|
|
||||||
const limit = parseLimit(task.payload.limit, 20, 1, 50);
|
|
||||||
const filings = await fetchRecentFilings(ticker, limit);
|
|
||||||
const metricsByCik = new Map<string, Filing['metrics']>();
|
|
||||||
|
|
||||||
for (const filing of filings) {
|
|
||||||
if (!metricsByCik.has(filing.cik)) {
|
|
||||||
const metrics = await fetchFilingMetrics(filing.cik, filing.ticker);
|
|
||||||
metricsByCik.set(filing.cik, metrics);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
let insertedCount = 0;
|
|
||||||
let updatedCount = 0;
|
|
||||||
|
|
||||||
await withStore((store) => {
|
|
||||||
for (const filing of filings) {
|
|
||||||
const existingIndex = store.filings.findIndex((entry) => entry.accession_number === filing.accessionNumber);
|
|
||||||
const timestamp = nowIso();
|
|
||||||
const metrics = metricsByCik.get(filing.cik) ?? null;
|
|
||||||
|
|
||||||
if (existingIndex >= 0) {
|
|
||||||
const existing = store.filings[existingIndex];
|
|
||||||
store.filings[existingIndex] = {
|
|
||||||
...existing,
|
|
||||||
ticker: filing.ticker,
|
|
||||||
cik: filing.cik,
|
|
||||||
filing_type: filing.filingType,
|
|
||||||
filing_date: filing.filingDate,
|
|
||||||
company_name: filing.companyName,
|
|
||||||
filing_url: filing.filingUrl,
|
|
||||||
metrics,
|
|
||||||
updated_at: timestamp
|
|
||||||
};
|
|
||||||
updatedCount += 1;
|
|
||||||
} else {
|
|
||||||
store.counters.filings += 1;
|
|
||||||
store.filings.unshift({
|
|
||||||
id: store.counters.filings,
|
|
||||||
ticker: filing.ticker,
|
|
||||||
filing_type: filing.filingType,
|
|
||||||
filing_date: filing.filingDate,
|
|
||||||
accession_number: filing.accessionNumber,
|
|
||||||
cik: filing.cik,
|
|
||||||
company_name: filing.companyName,
|
|
||||||
filing_url: filing.filingUrl,
|
|
||||||
metrics,
|
|
||||||
analysis: null,
|
|
||||||
created_at: timestamp,
|
|
||||||
updated_at: timestamp
|
|
||||||
});
|
|
||||||
insertedCount += 1;
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
store.filings.sort((a, b) => {
|
|
||||||
const byDate = Date.parse(b.filing_date) - Date.parse(a.filing_date);
|
|
||||||
return Number.isFinite(byDate) && byDate !== 0
|
|
||||||
? byDate
|
|
||||||
: Date.parse(b.updated_at) - Date.parse(a.updated_at);
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
ticker,
|
|
||||||
fetched: filings.length,
|
|
||||||
inserted: insertedCount,
|
|
||||||
updated: updatedCount
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async function processRefreshPrices(task: Task) {
|
|
||||||
const userId = task.user_id;
|
|
||||||
if (!userId) {
|
|
||||||
throw new Error('Task is missing user scope');
|
|
||||||
}
|
|
||||||
|
|
||||||
const snapshot = await getStoreSnapshot();
|
|
||||||
const userHoldings = snapshot.holdings.filter((holding) => holding.user_id === userId);
|
|
||||||
const tickers = [...new Set(userHoldings.map((holding) => holding.ticker))];
|
|
||||||
const quotes = new Map<string, number>();
|
|
||||||
|
|
||||||
for (const ticker of tickers) {
|
|
||||||
const quote = await getQuote(ticker);
|
|
||||||
quotes.set(ticker, quote);
|
|
||||||
}
|
|
||||||
|
|
||||||
let updatedCount = 0;
|
|
||||||
const updateTime = nowIso();
|
|
||||||
|
|
||||||
await withStore((store) => {
|
|
||||||
store.holdings = store.holdings.map((holding) => {
|
|
||||||
if (holding.user_id !== userId) {
|
|
||||||
return holding;
|
|
||||||
}
|
|
||||||
|
|
||||||
const quote = quotes.get(holding.ticker);
|
|
||||||
if (quote === undefined) {
|
|
||||||
return holding;
|
|
||||||
}
|
|
||||||
|
|
||||||
updatedCount += 1;
|
|
||||||
return recalculateHolding({
|
|
||||||
...holding,
|
|
||||||
current_price: quote.toFixed(6),
|
|
||||||
last_price_at: updateTime,
|
|
||||||
updated_at: updateTime
|
|
||||||
});
|
|
||||||
});
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
updatedCount,
|
|
||||||
totalTickers: tickers.length
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async function processAnalyzeFiling(task: Task) {
|
|
||||||
const accessionNumber = typeof task.payload.accessionNumber === 'string'
|
|
||||||
? task.payload.accessionNumber
|
|
||||||
: '';
|
|
||||||
|
|
||||||
if (!accessionNumber) {
|
|
||||||
throw new Error('accessionNumber is required');
|
|
||||||
}
|
|
||||||
|
|
||||||
const snapshot = await getStoreSnapshot();
|
|
||||||
const filing = snapshot.filings.find((entry) => entry.accession_number === accessionNumber);
|
|
||||||
|
|
||||||
if (!filing) {
|
|
||||||
throw new Error(`Filing ${accessionNumber} not found`);
|
|
||||||
}
|
|
||||||
|
|
||||||
const prompt = [
|
|
||||||
'You are a fiscal research assistant focused on regulatory signals.',
|
|
||||||
`Analyze this SEC filing from ${filing.company_name} (${filing.ticker}).`,
|
|
||||||
`Form: ${filing.filing_type}`,
|
|
||||||
`Filed: ${filing.filing_date}`,
|
|
||||||
`Metrics: ${JSON.stringify(filing.metrics ?? {})}`,
|
|
||||||
'Return concise sections: Thesis, Red Flags, Follow-up Questions, Portfolio Impact.'
|
|
||||||
].join('\n');
|
|
||||||
|
|
||||||
const analysis = await runOpenClawAnalysis(prompt, 'Use concise institutional analyst language.');
|
|
||||||
|
|
||||||
await withStore((store) => {
|
|
||||||
const index = store.filings.findIndex((entry) => entry.accession_number === accessionNumber);
|
|
||||||
if (index < 0) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
store.filings[index] = {
|
|
||||||
...store.filings[index],
|
|
||||||
analysis: {
|
|
||||||
provider: analysis.provider,
|
|
||||||
model: analysis.model,
|
|
||||||
text: analysis.text
|
|
||||||
},
|
|
||||||
updated_at: nowIso()
|
|
||||||
};
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
accessionNumber,
|
|
||||||
provider: analysis.provider,
|
|
||||||
model: analysis.model
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
function holdingDigest(holdings: Holding[]) {
|
|
||||||
return holdings.map((holding) => ({
|
|
||||||
ticker: holding.ticker,
|
|
||||||
shares: holding.shares,
|
|
||||||
avgCost: holding.avg_cost,
|
|
||||||
currentPrice: holding.current_price,
|
|
||||||
marketValue: holding.market_value,
|
|
||||||
gainLoss: holding.gain_loss,
|
|
||||||
gainLossPct: holding.gain_loss_pct
|
|
||||||
}));
|
|
||||||
}
|
|
||||||
|
|
||||||
async function processPortfolioInsights(task: Task) {
|
|
||||||
const userId = task.user_id;
|
|
||||||
if (!userId) {
|
|
||||||
throw new Error('Task is missing user scope');
|
|
||||||
}
|
|
||||||
|
|
||||||
const snapshot = await getStoreSnapshot();
|
|
||||||
const userHoldings = snapshot.holdings.filter((holding) => holding.user_id === userId);
|
|
||||||
const summary = buildPortfolioSummary(userHoldings);
|
|
||||||
|
|
||||||
const prompt = [
|
|
||||||
'Generate portfolio intelligence with actionable recommendations.',
|
|
||||||
`Portfolio summary: ${JSON.stringify(summary)}`,
|
|
||||||
`Holdings: ${JSON.stringify(holdingDigest(userHoldings))}`,
|
|
||||||
'Respond with: 1) health score (0-100), 2) top 3 risks, 3) top 3 opportunities, 4) next actions in 7 days.'
|
|
||||||
].join('\n');
|
|
||||||
|
|
||||||
const analysis = await runOpenClawAnalysis(prompt, 'Act as a risk-aware buy-side analyst.');
|
|
||||||
const createdAt = nowIso();
|
|
||||||
|
|
||||||
await withStore((store) => {
|
|
||||||
store.counters.insights += 1;
|
|
||||||
|
|
||||||
const insight: PortfolioInsight = {
|
|
||||||
id: store.counters.insights,
|
|
||||||
user_id: userId,
|
|
||||||
provider: analysis.provider,
|
|
||||||
model: analysis.model,
|
|
||||||
content: analysis.text,
|
|
||||||
created_at: createdAt
|
|
||||||
};
|
|
||||||
|
|
||||||
store.insights.unshift(insight);
|
|
||||||
});
|
|
||||||
|
|
||||||
return {
|
|
||||||
provider: analysis.provider,
|
|
||||||
model: analysis.model,
|
|
||||||
summary
|
|
||||||
};
|
|
||||||
}
|
|
||||||
|
|
||||||
async function runTaskProcessor(task: Task) {
|
|
||||||
switch (task.task_type) {
|
|
||||||
case 'sync_filings':
|
|
||||||
return await processSyncFilings(task);
|
|
||||||
case 'refresh_prices':
|
|
||||||
return await processRefreshPrices(task);
|
|
||||||
case 'analyze_filing':
|
|
||||||
return await processAnalyzeFiling(task);
|
|
||||||
case 'portfolio_insights':
|
|
||||||
return await processPortfolioInsights(task);
|
|
||||||
default:
|
|
||||||
throw new Error(`Unsupported task type: ${task.task_type}`);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
async function processTask(taskId: string) {
|
|
||||||
if (activeTaskRuns.has(taskId)) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
activeTaskRuns.add(taskId);
|
|
||||||
|
|
||||||
try {
|
|
||||||
const task = await withStore((store) => {
|
|
||||||
const index = store.tasks.findIndex((entry) => entry.id === taskId);
|
|
||||||
|
|
||||||
if (index < 0) {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
const target = store.tasks[index];
|
|
||||||
if (target.status !== 'queued') {
|
|
||||||
return null;
|
|
||||||
}
|
|
||||||
|
|
||||||
target.status = 'running';
|
|
||||||
target.attempts += 1;
|
|
||||||
target.updated_at = nowIso();
|
|
||||||
|
|
||||||
return { ...target };
|
|
||||||
});
|
|
||||||
|
|
||||||
if (!task) {
|
|
||||||
return;
|
|
||||||
}
|
|
||||||
|
|
||||||
try {
|
|
||||||
const result = toTaskResult(await runTaskProcessor(task));
|
|
||||||
|
|
||||||
await markTask(taskId, (target) => {
|
|
||||||
target.status = 'completed';
|
|
||||||
target.result = result;
|
|
||||||
target.error = null;
|
|
||||||
target.finished_at = nowIso();
|
|
||||||
});
|
|
||||||
} catch (error) {
|
|
||||||
const reason = error instanceof Error ? error.message : 'Task failed unexpectedly';
|
|
||||||
const shouldRetry = task.attempts < task.max_attempts;
|
|
||||||
|
|
||||||
if (shouldRetry) {
|
|
||||||
await markTask(taskId, (target) => {
|
|
||||||
target.status = 'queued';
|
|
||||||
target.error = reason;
|
|
||||||
});
|
|
||||||
|
|
||||||
queueTaskRun(taskId, 1200);
|
|
||||||
} else {
|
|
||||||
await markTask(taskId, (target) => {
|
|
||||||
target.status = 'failed';
|
|
||||||
target.error = reason;
|
|
||||||
target.finished_at = nowIso();
|
|
||||||
});
|
|
||||||
}
|
|
||||||
}
|
|
||||||
} finally {
|
|
||||||
activeTaskRuns.delete(taskId);
|
|
||||||
}
|
|
||||||
}
|
|
||||||
|
|
||||||
export async function enqueueTask(input: EnqueueTaskInput) {
|
export async function enqueueTask(input: EnqueueTaskInput) {
|
||||||
const createdAt = nowIso();
|
const task = await createTaskRunRecord({
|
||||||
|
|
||||||
const task: Task = {
|
|
||||||
id: randomUUID(),
|
id: randomUUID(),
|
||||||
user_id: input.userId,
|
user_id: input.userId,
|
||||||
task_type: input.taskType,
|
task_type: input.taskType,
|
||||||
status: 'queued',
|
|
||||||
priority: input.priority ?? 50,
|
|
||||||
payload: input.payload ?? {},
|
payload: input.payload ?? {},
|
||||||
result: null,
|
priority: input.priority ?? 50,
|
||||||
error: null,
|
max_attempts: input.maxAttempts ?? 3
|
||||||
attempts: 0,
|
|
||||||
max_attempts: input.maxAttempts ?? 3,
|
|
||||||
created_at: createdAt,
|
|
||||||
updated_at: createdAt,
|
|
||||||
finished_at: null
|
|
||||||
};
|
|
||||||
|
|
||||||
await withStore((store) => {
|
|
||||||
store.tasks.unshift(task);
|
|
||||||
store.tasks.sort((a, b) => {
|
|
||||||
if (a.priority !== b.priority) {
|
|
||||||
return b.priority - a.priority;
|
|
||||||
}
|
|
||||||
|
|
||||||
return Date.parse(b.created_at) - Date.parse(a.created_at);
|
|
||||||
});
|
|
||||||
});
|
});
|
||||||
|
|
||||||
queueTaskRun(task.id);
|
try {
|
||||||
return task;
|
const run = await start(runTaskWorkflow, [task.id]);
|
||||||
|
await setTaskWorkflowRunId(task.id, run.runId);
|
||||||
|
|
||||||
|
return {
|
||||||
|
...task,
|
||||||
|
workflow_run_id: run.runId
|
||||||
|
} satisfies Task;
|
||||||
|
} catch (error) {
|
||||||
|
const reason = error instanceof Error
|
||||||
|
? error.message
|
||||||
|
: 'Failed to start workflow';
|
||||||
|
await markTaskFailure(task.id, reason);
|
||||||
|
throw error;
|
||||||
|
}
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function getTaskById(taskId: string, userId: string) {
|
export async function getTaskById(taskId: string, userId: string) {
|
||||||
const snapshot = await getStoreSnapshot();
|
return await getTaskByIdForUser(taskId, userId);
|
||||||
return snapshot.tasks.find((task) => task.id === taskId && task.user_id === userId) ?? null;
|
|
||||||
}
|
}
|
||||||
|
|
||||||
export async function listRecentTasks(userId: string, limit = 20, statuses?: TaskStatus[]) {
|
export async function listRecentTasks(userId: string, limit = 20, statuses?: TaskStatus[]) {
|
||||||
const safeLimit = Math.min(Math.max(Math.trunc(limit), 1), 200);
|
return await listRecentTasksForUser(userId, limit, statuses);
|
||||||
const snapshot = await getStoreSnapshot();
|
}
|
||||||
const scoped = snapshot.tasks.filter((task) => task.user_id === userId);
|
|
||||||
|
export async function getTaskQueueSnapshot() {
|
||||||
const filtered = statuses && statuses.length > 0
|
return await countTasksByStatus();
|
||||||
? scoped.filter((task) => statuses.includes(task.status))
|
|
||||||
: scoped;
|
|
||||||
|
|
||||||
return filtered
|
|
||||||
.slice()
|
|
||||||
.sort((a, b) => Date.parse(b.created_at) - Date.parse(a.created_at))
|
|
||||||
.slice(0, safeLimit);
|
|
||||||
}
|
}
|
||||||
|
|||||||
@@ -46,6 +46,8 @@ export type Filing = {
|
|||||||
cik: string;
|
cik: string;
|
||||||
company_name: string;
|
company_name: string;
|
||||||
filing_url: string | null;
|
filing_url: string | null;
|
||||||
|
submission_url?: string | null;
|
||||||
|
primary_document?: string | null;
|
||||||
metrics: {
|
metrics: {
|
||||||
revenue: number | null;
|
revenue: number | null;
|
||||||
netIncome: number | null;
|
netIncome: number | null;
|
||||||
@@ -77,6 +79,7 @@ export type Task = {
|
|||||||
error: string | null;
|
error: string | null;
|
||||||
attempts: number;
|
attempts: number;
|
||||||
max_attempts: number;
|
max_attempts: number;
|
||||||
|
workflow_run_id?: string | null;
|
||||||
created_at: string;
|
created_at: string;
|
||||||
updated_at: string;
|
updated_at: string;
|
||||||
finished_at: string | null;
|
finished_at: string | null;
|
||||||
|
|||||||
@@ -1,3 +1,5 @@
|
|||||||
|
const { withWorkflow } = require('workflow/next');
|
||||||
|
|
||||||
/** @type {import('next').NextConfig} */
|
/** @type {import('next').NextConfig} */
|
||||||
const nextConfig = {
|
const nextConfig = {
|
||||||
reactStrictMode: true,
|
reactStrictMode: true,
|
||||||
@@ -20,6 +22,6 @@ const nextConfig = {
|
|||||||
}
|
}
|
||||||
];
|
];
|
||||||
}
|
}
|
||||||
}
|
};
|
||||||
|
|
||||||
module.exports = nextConfig;
|
module.exports = withWorkflow(nextConfig);
|
||||||
|
|||||||
@@ -9,10 +9,13 @@
|
|||||||
"start": "bun --bun next start",
|
"start": "bun --bun next start",
|
||||||
"lint": "bun --bun tsc --noEmit",
|
"lint": "bun --bun tsc --noEmit",
|
||||||
"db:generate": "bun x drizzle-kit generate",
|
"db:generate": "bun x drizzle-kit generate",
|
||||||
"db:migrate": "bun x drizzle-kit migrate"
|
"db:migrate": "bun x drizzle-kit migrate",
|
||||||
|
"workflow:setup": "bun x workflow-postgres-setup"
|
||||||
},
|
},
|
||||||
"dependencies": {
|
"dependencies": {
|
||||||
|
"@elysiajs/eden": "^1.4.8",
|
||||||
"@tailwindcss/postcss": "^4.2.1",
|
"@tailwindcss/postcss": "^4.2.1",
|
||||||
|
"@workflow/world-postgres": "^4.1.0-beta.36",
|
||||||
"better-auth": "^1.4.19",
|
"better-auth": "^1.4.19",
|
||||||
"clsx": "^2.1.1",
|
"clsx": "^2.1.1",
|
||||||
"date-fns": "^4.1.0",
|
"date-fns": "^4.1.0",
|
||||||
@@ -23,7 +26,8 @@
|
|||||||
"pg": "^8.18.0",
|
"pg": "^8.18.0",
|
||||||
"react": "^19.2.4",
|
"react": "^19.2.4",
|
||||||
"react-dom": "^19.2.4",
|
"react-dom": "^19.2.4",
|
||||||
"recharts": "^3.7.0"
|
"recharts": "^3.7.0",
|
||||||
|
"workflow": "^4.1.0-beta.60"
|
||||||
},
|
},
|
||||||
"devDependencies": {
|
"devDependencies": {
|
||||||
"@types/node": "^25.3.0",
|
"@types/node": "^25.3.0",
|
||||||
|
|||||||
Reference in New Issue
Block a user