From 04e5caf4e15d538789254261d766d84550d649d3 Mon Sep 17 00:00:00 2001 From: francy51 Date: Mon, 23 Feb 2026 21:10:39 -0500 Subject: [PATCH] feat: rebuild fiscal clone architecture and harden coolify deployment --- .env.example | 22 +- BETTER_AUTH_MIGRATION.md | 179 +----- COOLIFY.md | 153 ++--- DIRECT_COOLIFY_DEPLOYMENT.md | 556 +----------------- README.md | 412 +++---------- backend/Dockerfile | 11 +- backend/docker-compose.yml | 75 ++- backend/package.json | 12 +- backend/src/auth.ts | 44 +- backend/src/better-auth-migrate.ts | 109 ---- backend/src/config.ts | 47 ++ backend/src/db/index.ts | 50 +- backend/src/db/migrate.ts | 287 ++++++--- backend/src/index.ts | 65 +- backend/src/routes/auth.ts | 122 ---- backend/src/routes/better-auth.ts | 6 +- backend/src/routes/error.ts | 16 + backend/src/routes/filings.ts | 136 +++-- backend/src/routes/me.ts | 13 + backend/src/routes/openclaw.ts | 156 ++--- backend/src/routes/portfolio.ts | 224 +++++-- backend/src/routes/tasks.ts | 40 ++ backend/src/routes/watchlist.ts | 87 ++- backend/src/services/openclaw.ts | 61 ++ backend/src/services/prices.ts | 138 ++--- backend/src/services/sec.ts | 318 +++++----- backend/src/session.ts | 30 + backend/src/tasks/processors.ts | 201 +++++++ backend/src/tasks/repository.ts | 168 ++++++ backend/src/tasks/worker-loop.ts | 52 ++ backend/src/types.ts | 78 +++ backend/src/worker.ts | 19 + docker-compose.yml | 53 +- docs/REBUILD_DECISIONS.md | 78 +++ frontend/app/auth/signin/page.tsx | 132 ++--- frontend/app/auth/signup/page.tsx | 147 ++--- frontend/app/filings/page.tsx | 346 ++++++----- frontend/app/globals.css | 128 +++- frontend/app/layout.tsx | 31 +- frontend/app/page.tsx | 299 +++++++--- frontend/app/portfolio/page.tsx | 545 +++++++++-------- frontend/app/watchlist/page.tsx | 335 +++++------ frontend/components/auth/auth-shell.tsx | 45 ++ frontend/components/dashboard/metric-card.tsx | 23 + frontend/components/dashboard/task-feed.tsx | 36 ++ frontend/components/shell/app-shell.tsx | 128 
++++ frontend/components/ui/button.tsx | 27 + frontend/components/ui/input.tsx | 15 + frontend/components/ui/panel.tsx | 31 + frontend/components/ui/status-pill.tsx | 21 + frontend/hooks/use-auth-guard.ts | 22 + frontend/hooks/use-task-poller.ts | 59 ++ frontend/lib/api.ts | 144 +++++ frontend/lib/auth.ts | 12 - frontend/lib/better-auth.ts | 8 +- frontend/lib/format.ts | 29 + frontend/lib/runtime-url.ts | 45 ++ frontend/lib/types.ts | 90 +++ frontend/lib/utils.ts | 7 +- frontend/next.config.js | 14 +- frontend/package.json | 12 +- 61 files changed, 3826 insertions(+), 2923 deletions(-) delete mode 100644 backend/src/better-auth-migrate.ts create mode 100644 backend/src/config.ts delete mode 100644 backend/src/routes/auth.ts create mode 100644 backend/src/routes/error.ts create mode 100644 backend/src/routes/me.ts create mode 100644 backend/src/routes/tasks.ts create mode 100644 backend/src/services/openclaw.ts create mode 100644 backend/src/session.ts create mode 100644 backend/src/tasks/processors.ts create mode 100644 backend/src/tasks/repository.ts create mode 100644 backend/src/tasks/worker-loop.ts create mode 100644 backend/src/types.ts create mode 100644 backend/src/worker.ts create mode 100644 docs/REBUILD_DECISIONS.md create mode 100644 frontend/components/auth/auth-shell.tsx create mode 100644 frontend/components/dashboard/metric-card.tsx create mode 100644 frontend/components/dashboard/task-feed.tsx create mode 100644 frontend/components/shell/app-shell.tsx create mode 100644 frontend/components/ui/button.tsx create mode 100644 frontend/components/ui/input.tsx create mode 100644 frontend/components/ui/panel.tsx create mode 100644 frontend/components/ui/status-pill.tsx create mode 100644 frontend/hooks/use-auth-guard.ts create mode 100644 frontend/hooks/use-task-poller.ts create mode 100644 frontend/lib/api.ts delete mode 100644 frontend/lib/auth.ts create mode 100644 frontend/lib/format.ts create mode 100644 frontend/lib/runtime-url.ts create mode 100644 
frontend/lib/types.ts diff --git a/.env.example b/.env.example index 8f9bde5..f295f2b 100644 --- a/.env.example +++ b/.env.example @@ -1,20 +1,28 @@ -# Database +# PostgreSQL DATABASE_URL=postgres://postgres:postgres@localhost:5432/fiscal POSTGRES_USER=postgres POSTGRES_PASSWORD=postgres POSTGRES_DB=fiscal POSTGRES_HOST=localhost -# Backend +# API service PORT=3001 NODE_ENV=development -JWT_SECRET=change-this-to-a-random-secret-key -BETTER_AUTH_SECRET=change-this-to-a-random-secret-key -BETTER_AUTH_BASE_URL=http://localhost:3001 FRONTEND_URL=http://localhost:3000 +BETTER_AUTH_SECRET=replace-with-strong-random-secret +BETTER_AUTH_BASE_URL=http://localhost:3001 +SEC_USER_AGENT=Fiscal Clone # Frontend NEXT_PUBLIC_API_URL=http://localhost:3001 +# In Coolify this must be the public backend URL (e.g. https://api.fiscal.example.com) -# OpenClaw Integration -OPENCLAW_WEBHOOK_URL=https://discord.com/api/webhooks/... +# OpenClaw / ZeroClaw (OpenAI-compatible) +OPENCLAW_BASE_URL=http://localhost:4000 +OPENCLAW_API_KEY=replace-with-your-agent-key +OPENCLAW_MODEL=zeroclaw + +# Queue tuning +TASK_HEARTBEAT_SECONDS=15 +TASK_STALE_SECONDS=120 +TASK_MAX_ATTEMPTS=3 diff --git a/BETTER_AUTH_MIGRATION.md b/BETTER_AUTH_MIGRATION.md index 3a8cb54..0d33e8d 100644 --- a/BETTER_AUTH_MIGRATION.md +++ b/BETTER_AUTH_MIGRATION.md @@ -1,174 +1,9 @@ -# Better Auth Migration +# Better Auth Migration (Archived) -## Overview -Migrated from NextAuth v5 (beta) to Better Auth for unified authentication across both Elysia (backend) and Next.js (frontend). +This document described the pre-2.0 incremental migration path. 
-## Backend Changes - -### Installation -- Added `better-auth@1.4.18` package -- Added `pg@8.18.0` for PostgreSQL connection pool - -### New Files -- `backend/src/auth.ts` - Better Auth instance configuration -- `backend/src/routes/better-auth.ts` - Route handler for auth endpoints -- `backend/src/better-auth-migrate.ts` - Database migration script - -### Modified Files -- `backend/src/index.ts` - Replaced custom auth routes with Better Auth routes -- Removed custom JWT-based authentication routes (`backend/src/routes/auth.ts` can be deleted after migration) - -### Database Schema -New tables added: -- `session` - Session management -- `account` - OAuth/credential account storage -- `verification` - Email verification tokens - -Existing `users` table extended with: -- `email_verified` (BOOLEAN) -- `image` (TEXT) - -### Migration Steps -1. Run the migration script: - ```bash - cd backend - bun run src/better-auth-migrate.ts - ``` - -2. Set environment variables: - ```env - BETTER_AUTH_SECRET= - BETTER_AUTH_URL=http://localhost:3001 - ``` - -## Frontend Changes - -### Installation -- Added `better-auth` package (includes React client) - -### New Files -- `frontend/lib/better-auth.ts` - Better Auth client instance - -### Modified Files -- `frontend/lib/auth.ts` - Updated to use Better Auth session -- `frontend/app/auth/signin/page.tsx` - Updated to use Better Auth methods -- `frontend/app/auth/signup/page.tsx` - Updated to use Better Auth methods -- Removed `frontend/app/api/auth/[...nextauth]/route.ts` - No longer needed - -### Authentication Methods -Better Auth supports: -- Email/password (`signIn.email`, `signUp.email`) -- OAuth providers (`signIn.social`): - - GitHub (`signIn.social({ provider: 'github' })`) - - Google (`signIn.social({ provider: 'google' })`) - -### Session Management -```typescript -import { useSession } from '@/lib/better-auth'; - -// In client components -const { data: session } = useSession(); -``` - -```typescript -import { authClient } 
from '@/lib/better-auth'; - -// In server components -const { data: session } = await authClient.getSession(); -``` - -## Environment Variables - -### Required -```env -# Backend -DATABASE_URL=postgres://user:password@localhost:5432/fiscal -BETTER_AUTH_SECRET=<32+ character random string> -BETTER_AUTH_URL=http://localhost:3001 - -# OAuth Providers -GITHUB_ID= -GITHUB_SECRET= -GOOGLE_ID= -GOOGLE_SECRET= - -# Frontend -NEXT_PUBLIC_API_URL=http://localhost:3001 -``` - -## API Endpoints - -Better Auth provides these endpoints automatically (mounted at `/api/auth/*`): - -### Email/Password -- `POST /api/auth/sign-up/email` - Sign up with email -- `POST /api/auth/sign-in/email` - Sign in with email -- `GET /api/auth/get-session` - Get current session -- `POST /api/auth/sign-out` - Sign out - -### OAuth -- `GET /api/auth/sign-in/social` - Initiate OAuth flow -- `GET /api/auth/callback/*` - OAuth callback handler - -### Session -- `GET /api/auth/get-session` - Get current session -- `POST /api/auth/update-session` - Update session data - -## Key Differences from NextAuth - -### NextAuth -- Configuration in route handler (`app/api/auth/[...nextauth]/route.ts`) -- Server-side session management with JWT -- Custom callback for session/user data -- Requires `signIn()` and `signOut()` from `next-auth/react` - -### Better Auth -- Configuration in separate file (`backend/src/auth.ts`) -- Server and client components unified API -- Built-in session management with database storage -- `signIn.email`, `signIn.social`, `signOut` from `better-auth/react` -- Direct database access for user/session data - -## Testing Checklist - -- [ ] Run database migration: `bun run src/better-auth-migrate.ts` -- [ ] Start backend server -- [ ] Test email/password signup -- [ ] Test email/password login -- [ ] Test GitHub OAuth -- [ ] Test Google OAuth -- [ ] Test sign out -- [ ] Test protected routes redirect to sign in -- [ ] Test session persistence across page refreshes - -## Rollback Plan - -If 
issues arise, revert to NextAuth: -1. Restore `frontend/app/api/auth/[...nextauth]/route.ts` -2. Restore `frontend/app/auth/signin/page.tsx` and `frontend/app/auth/signup/page.tsx` -3. Restore `frontend/lib/auth.ts` -4. Remove `backend/src/auth.ts` and `backend/src/routes/better-auth.ts` -5. Restore custom auth routes in backend - -## Benefits of Better Auth - -1. **Unified Auth** - Single auth system for both backend and frontend -2. **Type Safety** - Better TypeScript support -3. **Database-Backed Sessions** - More secure than JWT -4. **Extensible** - Plugin system for 2FA, email verification, etc. -5. **Active Development** - More frequent updates and improvements -6. **Framework Agnostic** - Works with any backend framework - -## Future Improvements - -1. Enable email verification (Better Auth plugin) -2. Add two-factor authentication (Better Auth plugin) -3. Implement account management (password reset, email change) -4. Add session management UI (view active sessions, revoke) -5. Implement role-based access control (Better Auth plugin) - -## Resources - -- Better Auth Docs: https://www.better-auth.com/ -- Better Auth GitHub: https://github.com/better-auth/better-auth -- Migration Guide: https://www.better-auth.com/docs/migration +The codebase has been rebuilt for Fiscal Clone 2.0. Use these sources instead: +- `README.md` for runtime and setup +- `backend/src/auth.ts` for Better Auth configuration +- `backend/src/db/migrate.ts` for current schema +- `docs/REBUILD_DECISIONS.md` for architecture rationale diff --git a/COOLIFY.md b/COOLIFY.md index b796275..a4ff781 100644 --- a/COOLIFY.md +++ b/COOLIFY.md @@ -1,113 +1,78 @@ -# Coolify Deployment Guide +# Coolify Deployment (Fiscal Clone 2.0) -This project is ready for deployment on Coolify. +This repository is deployable on Coolify using the root `docker-compose.yml`. -## Prerequisites +## What gets deployed -1. Coolify instance running -2. GitHub repository with this code -3. 
PostgreSQL database +- `frontend` (Next.js) +- `backend` (Elysia API + Better Auth) +- `worker` (durable async job processor) +- `postgres` (database) -## Deployment Steps +`backend` and `worker` auto-run migrations on startup: +- `bun run src/db/migrate.ts` +- then start API/worker process -### Option 1: Single Docker Compose App +## Coolify setup -1. Create a new Docker Compose application in Coolify -2. Connect your GitHub repository -3. Select the `docker-compose.yml` file in the root -4. Configure environment variables: +1. Create a **Docker Compose** app in Coolify. +2. Connect this repository. +3. Use compose file: `/docker-compose.yml`. +4. Add public domains: +- `frontend` service on port `3000` (example: `https://fiscal.example.com`) +- `backend` service on port `3001` (example: `https://api.fiscal.example.com`) -``` -DATABASE_URL=postgres://postgres:your_password@postgres:5432/fiscal +## Required environment variables + +Set these in Coolify before deploy: + +```env POSTGRES_USER=postgres -POSTGRES_PASSWORD=your_password +POSTGRES_PASSWORD= POSTGRES_DB=fiscal -PORT=3001 -BETTER_AUTH_SECRET=your-random-long-secret -BETTER_AUTH_BASE_URL=https://api.your-fiscal-domain.com -JWT_SECRET=your-jwt-secret-key-min-32-characters -GITHUB_ID=your-github-oauth-client-id -GITHUB_SECRET=your-github-oauth-client-secret -GOOGLE_ID=your-google-oauth-client-id -GOOGLE_SECRET=your-google-oauth-client-secret -NEXT_PUBLIC_API_URL=https://api.your-fiscal-domain.com + +DATABASE_URL=postgres://postgres:@postgres:5432/fiscal + +# Public URLs +FRONTEND_URL=https://fiscal.example.com +BETTER_AUTH_BASE_URL=https://api.fiscal.example.com +NEXT_PUBLIC_API_URL=https://api.fiscal.example.com + +# Security +BETTER_AUTH_SECRET= +SEC_USER_AGENT=Fiscal Clone + +# Optional OpenClaw/ZeroClaw integration +OPENCLAW_BASE_URL=https://your-openclaw-endpoint +OPENCLAW_API_KEY= +OPENCLAW_MODEL=zeroclaw + +# Optional queue tuning +TASK_HEARTBEAT_SECONDS=15 +TASK_STALE_SECONDS=120 +TASK_MAX_ATTEMPTS=3 
``` -5. Deploy +## Important build note -### Option 2: Separate Applications +`NEXT_PUBLIC_API_URL` is compiled into the frontend bundle at build time. If you change it, trigger a new deploy/rebuild. -#### Backend +The frontend includes a safety fallback: if `NEXT_PUBLIC_API_URL` is accidentally set to an internal host like `http://backend:3001`, browser calls will fall back to `https://api.`. +This is a fallback only; keep `NEXT_PUBLIC_API_URL` correct in Coolify. -1. Create a new application in Coolify -2. Source: GitHub -3. Branch: `main` -4. Build Context: `/backend` -5. Build Pack: `Dockerfile` -6. Environment Variables: - ``` - DATABASE_URL=postgres://... - PORT=3001 - ``` -7. Deploy - -#### Frontend - -1. Create a new application in Coolify -2. Source: GitHub -3. Branch: `main` -4. Build Context: `/frontend` -5. Build Pack: `Dockerfile` -6. Environment Variables: - ``` - NEXT_PUBLIC_API_URL=https://your-backend-domain.com - ``` -7. Deploy - -## Environment Variables - -### Backend -- `DATABASE_URL` - PostgreSQL connection string -- `PORT` - Server port (default: 3001) -- `NODE_ENV` - Environment (development/production) -- `BETTER_AUTH_SECRET` - Required in production; use a long random secret -- `BETTER_AUTH_BASE_URL` - Public backend URL used for auth callbacks - -### Frontend -- `NEXT_PUBLIC_API_URL` - Backend API URL - -`NEXT_PUBLIC_API_URL` is used at image build time in the frontend Docker build. -Set it in Coolify before deploying so the generated client bundle points to the correct backend URL. - -## Database Setup - -The application will automatically create the database schema on startup. To manually run migrations: +## Post-deploy checks +1. API health: ```bash -docker exec -it bun run db:migrate +curl -f https://api.fiscal.example.com/api/health ``` +2. Frontend loads and auth screens render. +3. Create user, add watchlist symbol, queue filing sync. +4. Confirm background tasks move `queued -> running -> completed` in dashboard. 
-## Monitoring ## Common pitfalls -Once deployed, add the application to OpenClaw's monitoring: - -1. Add to `/data/workspace/memory/coolify-integration.md` -2. Set up Discord alerts for critical issues -3. Configure cron jobs for health checks - -## Troubleshooting - -### Database Connection Failed -- Check DATABASE_URL is correct -- Ensure PostgreSQL container is running -- Verify network connectivity - -### Frontend Can't Connect to Backend -- Verify NEXT_PUBLIC_API_URL points to backend -- Check CORS settings in backend -- Ensure both containers are on same network - -### Cron Jobs Not Running -- Check Elysia cron configuration -- Verify timezone settings -- Check logs for errors +- `NEXT_PUBLIC_API_URL` left as internal hostname (`http://backend:3001`) causes auth/API failures until fallback or proper config is applied. +- `FRONTEND_URL` missing/incorrect causes CORS/session issues. +- `BETTER_AUTH_BASE_URL` must be the public backend URL, not the internal container hostname. +- Deploying frontend and backend on unrelated domains can cause cookie/session headaches. Prefer same root domain (e.g. `fiscal.example.com` + `api.fiscal.example.com`). diff --git a/DIRECT_COOLIFY_DEPLOYMENT.md b/DIRECT_COOLIFY_DEPLOYMENT.md index 5016a16..10d49ba 100644 --- a/DIRECT_COOLIFY_DEPLOYMENT.md +++ b/DIRECT_COOLIFY_DEPLOYMENT.md @@ -1,555 +1,5 @@ -# Fiscal Clone - Direct Coolify Deployment +# Direct Coolify Deployment -Bypassing Gitea for direct Coolify deployment! +Use `COOLIFY.md` in the repository root as the canonical deployment guide for Fiscal Clone 2.0. -## Deployment Strategy - -**Method:** File Upload (Simplest & Most Reliable) - -### Why Direct to Coolify? - -1. ✅ No Git repository issues -2. ✅ No SSL certificate problems -3. ✅ No authentication failures -4. ✅ Fast deployment -5. ✅ Easy rollback -6.
✅ Built-in environment variable management - -## Deployment Steps - -### Step 1: Prepare Files - -**Already Ready:** -- Fiscal Clone code: `/data/workspace/fiscal-clone/` -- Docker Compose configured -- All features implemented - -### Step 2: Deploy to Coolify - -**In Coolify Dashboard:** - -1. **Create New Application** - - Type: Docker Compose - - Name: `fiscal-clone-full-stack` - - Source: **File Upload** - -2. **Upload Files** - - Create zip/tarball of `fiscal-clone/` folder - - Or select folder if Coolify supports directory upload - - Upload all files - -3. **Configure Build Context** - - Build Context: `/` - - Docker Compose File: `docker-compose.yml` - -4. **Configure Domains** - - **Frontend:** `fiscal.b11studio.xyz` - - **Backend API:** `api.fiscal.b11studio.xyz` - -5. **Configure Environment Variables** - -**Required Variables:** -```bash -DATABASE_URL=postgres://postgres:your-secure-password@postgres:5432/fiscal -POSTGRES_USER=postgres -POSTGRES_PASSWORD=your-secure-password -POSTGRES_DB=fiscal - -JWT_SECRET=your-jwt-secret-key-min-32-characters - -NEXT_PUBLIC_API_URL=http://backend:3001 -``` - -**Optional OAuth Variables:** -```bash -# GitHub OAuth -GITHUB_ID=your-github-oauth-client-id -GITHUB_SECRET=your-github-oauth-client-secret - -# Google OAuth -GOOGLE_ID=your-google-oauth-client-id -GOOGLE_SECRET=your-google-oauth-client-secret -``` - -6. **Deploy!** - - Click "Deploy" button - - Monitor deployment logs in Coolify - -### Step 3: First Access - -**After Deployment:** - -1. **Access Frontend:** https://fiscal.b11studio.xyz -2. **Create Account:** - - Click "Sign Up" - - Enter email, name, password - - Click "Create Account" - -3. **Login:** Use your new account to log in - -4. **Add to Watchlist:** - - Go to "Watchlist" - - Add a stock ticker (e.g., AAPL) - - Wait for SEC filings to be fetched - -5. 
**Add to Portfolio:** - - Go to "Portfolio" - - Add a holding - - Enter ticker, shares, average cost - -### Step 4: Database Migrations - -**Automatic or Manual:** - -The database schema should automatically create on first run, but you can manually run migrations if needed: - -**In Coolify Terminal:** -```bash -# Access backend container -docker exec -it sh - -# Run migrations -bun run db:migrate -``` - -## Architecture - -``` -Coolify Server -└── Fiscal Clone Application - ├── Frontend (Next.js 14) - │ ├── Domain: https://fiscal.b11studio.xyz - │ ├── Routes: /, /auth/*, /api/*, /portfolio, /filings, /watchlist - │ └── Environment: NEXT_PUBLIC_API_URL - ├── Backend (Elysia.js + Bun) - │ ├── Port: 3001 - │ ├── Routes: /api/*, /api/auth/* - │ └── Environment: DATABASE_URL, JWT_SECRET, etc. - └── Database (PostgreSQL 16) - ├── User auth tables - ├── Filings tables - ├── Portfolio tables - └── Watchlist tables -``` - -## Health Checks - -After deployment, verify all services are running: - -### Backend Health -```bash -curl http://api.fiscal.b11studio.xyz/api/health -``` - -**Expected Response:** -```json -{ - "status": "ok", - "timestamp": "2026-02-16T02:31:00.000Z", - "version": "1.0.0", - "database": "connected" -} -``` - -### Frontend Health -```bash -curl https://fiscal.b11studio.xyz -``` - -**Expected:** HTML page loads successfully - -### Database Health -```bash -# Check Coolify dashboard -# PostgreSQL container should show as "healthy" -# All containers should be "running" -``` - -## Troubleshooting - -### Application Won't Start - -**Check Coolify Logs:** -- Navigate to application → Logs -- Look for database connection errors -- Check if PostgreSQL container is healthy -- Verify environment variables are correct - -**Common Issues:** - -1. 
**Database Connection Failed** - - Error: `connection refused` or `password authentication failed` - - Fix: Verify `DATABASE_URL` and `POSTGRES_PASSWORD` match - - Fix: Ensure PostgreSQL container is running and healthy - -2. **Frontend Can't Connect to Backend** - - Error: `502 Bad Gateway` or `Connection refused` - - Fix: Verify `NEXT_PUBLIC_API_URL` is correct - - Fix: Check if backend is running - - Fix: Verify network connectivity between containers - -3. **Authentication Fails** - - Error: `Invalid token` or `Authentication failed` - - Fix: Generate new `JWT_SECRET` - - Fix: Update backend container to restart - - Fix: Verify OAuth credentials (GitHub/Google) are correct - -4. **Port Conflicts** - - Error: `Address already in use` - - Fix: Coolify automatically assigns ports - - Fix: Check Coolify logs for port conflicts - -### Manual Restart - -If application is in bad state: - -1. In Coolify Dashboard → Application -2. Click "Restart" button -3. Wait for all containers to restart -4. Check logs for errors - -### Rollback to Previous Version - -If deployment breaks functionality: - -1. In Coolify Dashboard → Application -2. Click "Deployments" -3. Select previous successful deployment -4. Click "Rollback" -5. Verify functionality - -## Advanced Configuration - -### Performance Tuning - -**For High Load:** - -In Coolify environment variables for backend: -```bash -# Database connection pooling -POSTGRES_DB_MAX_CONNECTIONS=200 -POSTGRES_DB_MAX_IDLE_CONNECTIONS=50 -POSTGRES_DB_CONNECTION_LIFETIME=1h - -# Worker processes -PORT=3001 -NODE_ENV=production -``` - -**Caching Configuration:** - -Already configured with Redis. Add environment variable: -```bash -# Enable Redis caching -REDIS_ENABLED=true -REDIS_URL=redis://redis:6379 -``` - -### SSL/TLS Configuration - -Coolify automatically handles SSL with Let's Encrypt. 
Ensure: -- Domain DNS points correctly to Coolify -- Port 80 and 443 are open on VPS firewall -- Coolify Traefik proxy is running - -## Monitoring - -### Coolify Built-in Monitoring - -1. **Application Logs:** View in Coolify Dashboard -2. **Container Logs:** Docker logs for each service -3. **Resource Usage:** CPU, Memory, Disk in Dashboard -4. **Health Checks:** Built-in health endpoints - -### External Monitoring - -Set up uptime monitoring: - -**Services:** -- UptimeRobot -- Pingdom -- StatusCake - -**URLs to Monitor:** -- Frontend: `https://fiscal.b11studio.xyz` -- Backend API: `http://api.fiscal.b11studio.xyz/api/health` -- Health Check: `http://api.fiscal.b11studio.xyz/health` - -### Discord Alerts - -Integrate with your Discord server: - -**Create these webhooks in Coolify:** - -1. **For Deployment Channel:** - - Coolify → Application → Webhooks - - Webhook URL: Your Discord webhook URL - - Events: Deployment status - -2. **For Alert Channel:** - - Webhook URL: Your Discord webhook URL - - Events: Application failures, crashes - -**Or use Coolify's built-in Discord integration** (if available) - -## Backup Strategy - -### Automatic Backups (Coolify) - -Coolify provides: -- ✅ PostgreSQL automated backups -- ✅ Volume persistence across deployments - -### Manual Backups - -**Export Database:** -```bash -# In Coolify terminal -docker exec -it postgres pg_dump -U postgres -d fiscal > backup-$(date +%Y%m%d).sql -``` - -**Import Database:** -```bash -# In Coolify terminal -docker exec -i postgres psql -U postgres -d fiscal < backup-20260215.sql -``` - -**Download Data:** -```bash -# Access backend container -docker exec -it backend tar -czf /tmp/fiscal-backup.tar.gz /app/data - -# Download from container (if Coolify provides file access) -# Or access via Coolify file browser (if available) -``` - -## Scaling - -### Horizontal Scaling - -**In Coolify:** - -1. Application Settings → Resources -2. Increase replicas for frontend -3. Add more backend instances -4. 
Load balance with Traefik - -### Database Scaling - -**For High Load:** - -1. Use separate PostgreSQL instance -2. Configure connection pooling -3. Use read replicas for queries -4. Optimize indexes - -**Coolify provides:** -- Easy horizontal scaling -- Load balancing with Traefik -- Resource limits per container - -## Security - -### Environment Variables Security - -**Never commit secrets!** - -Use Coolify environment variables for sensitive data: - -❌ DO NOT COMMIT: -- API keys -- Database passwords -- JWT secrets -- OAuth client secrets -- SMTP credentials - -✅ DO USE: -- Coolify environment variables -- Encrypted storage in Coolify -- Separate .env files for local development - -### Password Security - -**Generate Strong Passwords:** - -```bash -# Database password -openssl rand -base64 32 | tr -d '[:alnum:]' - -# JWT secret -openssl rand -base64 32 | tr -d '[:alnum:]' - -# Admin password (if needed) -openssl rand -base64 32 | tr -d '[:alnum:]' -``` - -**Store securely in Coolify:** -- DATABASE_URL=postgres://postgres:@postgres:5432/fiscal -- JWT_SECRET= -- POSTGRES_PASSWORD= - -### Access Control - -**Discord Server:** -- Create private channels for admin discussions -- Limit sensitive information to private channels -- Use role-based access control - -**Coolify:** -- Use Team permissions -- Restrict application access -- Enable 2FA (if available for Coolify account) -- Regular security updates - -### Firewall Rules - -Ensure VPS firewall allows: - -``` -# Inbound -80/tcp - HTTP (Traefik) -443/tcp - HTTPS (Traefik) -22/tcp - SSH (optional) -3000/tcp - Next.js -3001/tcp - Backend API - -# Outbound -All (for Git operations, SEC API, Yahoo Finance) -``` - -## Update Strategy - -### Update Process - -**Current Version:** 1.0.0 - -**To Update:** - -1. **Update Code Locally** - ```bash - cd /data/workspace/fiscal-clone - git pull origin main - # Make changes - git add . - git commit -m "chore: Update Fiscal Clone" - git push origin main - ``` - -2. 
**Deploy New Version in Coolify** - - Go to Coolify Dashboard → Application - - Click "Deploy" button - - Coolify pulls latest code and rebuilds - -3. **Monitor Deployment** - - Watch logs in Coolify - - Verify all services are healthy - - Test all features - -**Blue-Green Deployment:** - -1. Deploy new version to new Coolify application -2. Switch DNS to new version -3. Verify new version works -4. Delete old version - -### Rollback Strategy - -If new deployment has issues: - -1. In Coolify Dashboard → Deployments -2. Select previous stable version -3. Click "Rollback" -4. DNS switches back automatically - -## CI/CD - -### Coolify Built-in CI/CD - -**Automatic Deployment:** - -Configure webhooks in Coolify: - -**GitHub:** -1. Repository Settings → Webhooks -2. Payload URL: Coolify provides this -3. Events: Push, Pull Request -4. Push to main branch triggers deployment - -**GitLab:** -1. Repository Settings → Integrations -2. Deployments → Create deploy token -3. Configure in Coolify - -**Gitea:** -1. Repository Settings → Webhooks -2. Gitea webhook URL: Coolify provides this -3. Events: Push events - -### Pipeline - -``` -Push Code → GitHub/GitLab/Gitea - ↓ -Coolify Webhook Triggered - ↓ -New Build & Deploy - ↓ -Application Deployed to Coolify - ↓ -Health Checks & Monitoring - ↓ -Live on https://fiscal.b11studio.xyz -``` - -## Documentation - -### Links - -- **Fiscal Clone README:** `/data/workspace/fiscal-clone/README.md` -- **Coolify Docs:** https://docs.coolify.io/ -- **NextAuth Docs:** https://next-auth.js.org/ -- **Elysia Docs:** https://elysiajs.com/ -- **PostgreSQL Docs:** https://www.postgresql.org/docs/ - -### API Documentation - -After deployment, access: -- Swagger UI: `https://api.fiscal.b11studio.xyz/swagger` -- API Docs: `/api/filings`, `/api/portfolio`, etc. 
- -### Quick Reference - -**Environment Variables:** -```bash -DATABASE_URL=postgres://postgres:password@postgres:5432/fiscal -JWT_SECRET= -NEXT_PUBLIC_API_URL=http://backend:3001 -``` - -**Endpoints:** -- Frontend: https://fiscal.b11studio.xyz -- Backend API: http://api.fiscal.b11studio.xyz -- Health: http://api.fiscal.b11studio.xyz/api/health -- Swagger: https://api.fiscal.b11studio.xyz/swagger -``` - -## Success Criteria - -Deployment is successful when: - -- [ ] All containers are running -- [ ] PostgreSQL is healthy -- [ ] Frontend loads at https://fiscal.b11studio.xyz -- [ ] Backend API responds on /api/health -- [ ] Can create account and login -- [ ] Can add stocks to watchlist -- [ ] Can add holdings to portfolio -- [ ] SEC filings are being fetched -- [ ] Database tables created -- [ ] No errors in logs - ---- - -**Deployment Version:** 2.0 (Direct to Coolify) -**Status:** Ready for deployment -**Last Updated:** 2026-02-16 +This file is retained only as a compatibility entrypoint. diff --git a/README.md b/README.md index 6d0e8c4..53ea51d 100644 --- a/README.md +++ b/README.md @@ -1,367 +1,135 @@ -# Fiscal Clone +# Fiscal Clone 2.0 -Financial filings extraction and portfolio analytics powered by SEC EDGAR. 
+Ground-up rebuild of a `fiscal.ai`-style platform with: +- Better Auth for session-backed auth +- Next.js frontend +- high-throughput API service +- durable long-running task worker +- OpenClaw/ZeroClaw AI integration +- futuristic terminal UI language -## Features +## Feature Coverage -- **SEC Filings Extraction** - - 10-K, 10-Q, 8-K filings support - - Key metrics extraction (revenue, net income, assets, cash, debt) - - Real-time search and updates - -- **Portfolio Analytics** - - Stock holdings tracking - - Real-time price updates (Yahoo Finance API) - - Automatic P&L calculations - - Performance charts (pie chart allocation, line chart performance) - -- **Watchlist Management** - - Add/remove stocks to watchlist - - Track company and sector information - - Quick access to filings and portfolio - -- **Authentication** - - NextAuth.js with multiple providers - - GitHub, Google OAuth, Email/Password - - JWT-based session management with 30-day expiration - -- **OpenClaw Integration** - - AI portfolio insights - - AI filing analysis - - Discord notification endpoints +- Authentication (email/password via Better Auth) +- Watchlist management +- SEC filings ingestion (10-K, 10-Q, 8-K) +- Filing analysis jobs (async AI pipeline) +- Portfolio holdings and summary analytics +- Price refresh jobs (async) +- AI portfolio insight jobs (async) +- Task tracking endpoint and UI polling -## Tech Stack +## Architecture -- **Backend**: Elysia.js (Bun runtime) -- **Frontend**: Next.js 14 + TailwindCSS + Recharts -- **Database**: PostgreSQL with automatic P&L calculations -- **Data Sources**: SEC EDGAR API, Yahoo Finance API -- **Authentication**: NextAuth.js (GitHub, Google, Credentials) -- **Deployment**: Coolify (Docker Compose) +- `frontend/`: Next.js App Router UI +- `backend/`: Elysia API + Better Auth + domain routes +- `backend/src/worker.ts`: durable queue worker +- `docs/REBUILD_DECISIONS.md`: one-by-one architecture decisions -## Getting Started +Runtime topology: +1. 
Frontend web app +2. Backend API +3. Worker process for long tasks +4. PostgreSQL -### Prerequisites - -- Node.js 20+ -- Bun 1.0+ -- PostgreSQL 16 -- GitHub account -- Coolify instance with API access - -### Installation +## Local Setup ```bash -# Clone repository -git clone https://git.b11studio.xyz/francy51/fiscal-clone.git -cd fiscal-clone +cp .env.example .env +``` -# Install backend dependencies +### 1) Backend + +```bash cd backend bun install - -# Install frontend dependencies -cd frontend -npm install - -# Copy environment variables -cp .env.example .env -# Edit .env with your configuration -nano .env +bun run db:migrate +bun run dev ``` -### Environment Variables - -```env -# Database -DATABASE_URL=postgres://postgres:your_password@localhost:5432/fiscal -POSTGRES_USER=postgres -POSTGRES_PASSWORD=your_password -POSTGRES_DB=fiscal - -# Backend -PORT=3001 -NODE_ENV=production -JWT_SECRET=your-jwt-secret-key-min-32-characters -GITHUB_ID=your_github_oauth_client_id -GITHUB_SECRET=your_github_oauth_client_secret -GOOGLE_ID=your_google_oauth_client_id -GOOGLE_SECRET=your_google_oauth_client_secret - -# Frontend -NEXT_PUBLIC_API_URL=http://localhost:3001 -``` - -### Running Locally +### 2) Worker (new terminal) ```bash -# Run database migrations cd backend -bun run db:migrate +bun run dev:worker +``` -# Start backend -cd backend -bun run dev +### 3) Frontend (new terminal) -# Start frontend (new terminal) +```bash cd frontend +npm install npm run dev ``` -## Deployment via Gitea to Coolify +Frontend: `http://localhost:3000` +Backend: `http://localhost:3001` +Swagger: `http://localhost:3001/swagger` -### 1. Push to Gitea +## Docker Compose ```bash -# Initialize git -cd /data/workspace/fiscal-clone -git init -git add . -git commit -m "feat: Initial Fiscal Clone release" - -# Add remote -git remote add gitea https://git.b11studio.xyz/francy51/fiscal-clone.git - -# Push to Gitea -git push -u gitea:your-gitea-username main +docker compose up --build ``` -### 2. 
Deploy to Coolify +This starts: `postgres`, `backend`, `worker`, `frontend`. -In Coolify dashboard: +## Coolify -1. **Create Application** - - Type: Docker Compose - - Name: `fiscal-clone` - - Source: Git Repository - - Repository: `git@git.b11studio.xyz:francy51/fiscal-clone.git` - - Branch: `main` - - Build Context: `/` - - Docker Compose File: `docker-compose.yml` +Deploy using the root compose file and configure separate public domains for: +- `frontend` on port `3000` +- `backend` on port `3001` -2. **Configure Domains** - - Frontend: `fiscal.b11studio.xyz` - - Backend API: `api.fiscal.b11studio.xyz` +Use the full guide in `COOLIFY.md`. -3. **Add Environment Variables** - ``` - DATABASE_URL=postgres://postgres:password@postgres:5432/fiscal - POSTGRES_USER=postgres - POSTGRES_PASSWORD=your-password - POSTGRES_DB=fiscal - PORT=3001 - JWT_SECRET=your-jwt-secret - NEXT_PUBLIC_API_URL=http://backend:3000 - GITHUB_ID=your-github-oauth-id - GITHUB_SECRET=your-github-oauth-secret - GOOGLE_ID=your-google-oauth-id - GOOGLE_SECRET=your-google-oauth-secret - ``` +Critical variables for Coolify: +- `FRONTEND_URL` = frontend public URL +- `BETTER_AUTH_BASE_URL` = backend public URL +- `NEXT_PUBLIC_API_URL` = backend public URL (build-time in frontend) -4. 
**Deploy** +## Core API Surface -## API Endpoints +Auth: +- `ALL /api/auth/*` (Better Auth handler) +- `GET /api/me` -### Authentication -- `POST /api/auth/register` - Register new user -- `POST /api/auth/login` - Login -- `POST /api/auth/verify` - Verify JWT token +Watchlist: +- `GET /api/watchlist` +- `POST /api/watchlist` +- `DELETE /api/watchlist/:id` -### SEC Filings -- `GET /api/filings` - Get all filings -- `GET /api/filings/:ticker` - Get filings by ticker -- `POST /api/filings/refresh/:ticker` - Refresh filings +Portfolio: +- `GET /api/portfolio/holdings` +- `POST /api/portfolio/holdings` +- `PATCH /api/portfolio/holdings/:id` +- `DELETE /api/portfolio/holdings/:id` +- `GET /api/portfolio/summary` +- `POST /api/portfolio/refresh-prices` (queues task) +- `POST /api/portfolio/insights/generate` (queues task) +- `GET /api/portfolio/insights/latest` -### Portfolio -- `GET /api/portfolio/:userId` - Get portfolio -- `GET /api/portfolio/:userId/summary` - Get summary -- `POST /api/portfolio` - Add holding -- `PUT /api/portfolio/:id` - Update holding -- `DELETE /api/portfolio/:id` - Delete holding +Filings: +- `GET /api/filings?ticker=&limit=` +- `GET /api/filings/:accessionNumber` +- `POST /api/filings/sync` (queues task) +- `POST /api/filings/:accessionNumber/analyze` (queues task) -### Watchlist -- `GET /api/watchlist/:userId` - Get watchlist -- `POST /api/watchlist` - Add stock -- `DELETE /api/watchlist/:id` - Remove stock +Task tracking: +- `GET /api/tasks` +- `GET /api/tasks/:taskId` -### OpenClaw Integration -- `POST /api/openclaw/notify/filing` - Discord notification -- `POST /api/openclaw/insights/portfolio` - Portfolio analysis -- `POST /api/openclaw/insights/filing` - Filing analysis +## OpenClaw / ZeroClaw Integration -## Database Schema +Set these in `.env`: -### Users -```sql -CREATE TABLE users ( - id SERIAL PRIMARY KEY, - email VARCHAR(255) UNIQUE NOT NULL, - password TEXT NOT NULL, - name VARCHAR(255), - created_at TIMESTAMP DEFAULT 
CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP -); +```env +OPENCLAW_BASE_URL=http://localhost:4000 +OPENCLAW_API_KEY=... +OPENCLAW_MODEL=zeroclaw ``` -### Filings -```sql -CREATE TABLE filings ( - id SERIAL PRIMARY KEY, - ticker VARCHAR(10) NOT NULL, - filing_type VARCHAR(20) NOT NULL, - filing_date DATE NOT NULL, - accession_number VARCHAR(40) UNIQUE NOT NULL, - cik VARCHAR(20) NOT NULL, - company_name TEXT NOT NULL, - key_metrics JSONB, - insights TEXT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP -); -``` +The backend expects an OpenAI-compatible `/v1/chat/completions` endpoint. -### Portfolio (with auto-calculations) -```sql -CREATE TABLE portfolio ( - id SERIAL PRIMARY KEY, - user_id INTEGER REFERENCES users(id) ON DELETE CASCADE, - ticker VARCHAR(10) NOT NULL, - shares NUMERIC(20, 4) NOT NULL, - avg_cost NUMERIC(10, 4) NOT NULL, - current_price NUMERIC(10, 4), - current_value NUMERIC(20, 4), - gain_loss NUMERIC(20, 4), - gain_loss_pct NUMERIC(10, 4), - last_updated TIMESTAMP, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - UNIQUE(user_id, ticker) -); -``` +## Decision Log -### Watchlist -```sql -CREATE TABLE watchlist ( - id SERIAL PRIMARY KEY, - user_id INTEGER REFERENCES users(id) ON DELETE CASCADE, - ticker VARCHAR(10) NOT NULL, - company_name TEXT NOT NULL, - sector VARCHAR(100), - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - UNIQUE(user_id, ticker) -); -``` - -## Project Structure - -``` -fiscal-clone/ -├── backend/ -│ ├── src/ -│ │ ├── index.ts # Main server -│ │ ├── db/ -│ │ │ ├── index.ts # Database connection -│ │ │ └── migrate.ts # Database migrations -│ │ ├── routes/ -│ │ │ ├── auth.ts # Authentication -│ │ │ ├── filings.ts # SEC filings API -│ │ │ ├── portfolio.ts # Portfolio management -│ │ │ ├── watchlist.ts # Watchlist management -│ │ │ └── openclaw.ts # AI integration -│ │ └── services/ -│ │ ├── sec.ts # SEC EDGAR scraper -│ │ └── prices.ts # Yahoo Finance service -│ ├── Dockerfile -│ ├── docker-compose.yml 
-│ └── package.json -├── frontend/ -│ ├── app/ -│ │ ├── layout.tsx # Root layout -│ │ ├── page.tsx # Dashboard -│ │ ├── auth/ -│ │ │ ├── signin/page.tsx # Login -│ │ │ └── signup/page.tsx # Registration -│ │ ├── portfolio/page.tsx # Portfolio management -│ │ ├── filings/page.tsx # SEC filings -│ │ └── watchlist/page.tsx # Watchlist -│ ├── lib/ -│ │ ├── auth.ts # Auth helpers -│ │ └── utils.ts # Utility functions -│ ├── globals.css -│ ├── tailwind.config.js -│ ├── next.config.js -│ ├── tsconfig.json -│ └── package.json -├── docker-compose.yml # Full stack deployment -└── .env.example # Environment variables template -``` - -## Features Status - -### ✅ Implemented -- [x] User authentication (GitHub, Google, Email/Password) -- [x] SEC EDGAR data scraping -- [x] Key metrics extraction from filings -- [x] Stock holdings tracking -- [x] Real-time price updates (Yahoo Finance) -- [x] Automatic P&L calculations -- [x] Portfolio value summary -- [x] Gain/loss tracking with percentages -- [x] Portfolio allocation pie chart -- [x] Performance line chart -- [x] Watchlist management -- [x] Add/delete holdings -- [x] Add/remove stocks from watchlist -- [x] OpenClaw AI integration endpoints -- [x] Database migrations with triggers -- [x] Full CRUD operations -- [x] Responsive design -- [x] Loading states -- [x] User feedback -- [x] Production-ready Docker configs - -### 🚀 Future Enhancements -- [ ] WebSocket for real-time stock prices -- [ ] Two-factor authentication -- [ ] More filing types (S-1, 13D, DEF 14A, etc.) 
-- [ ] PDF parsing for full filing documents -- [ ] Export functionality (CSV, PDF) -- [ ] Mobile app -- [ ] Advanced analytics and reports -- [ ] Social features (follow portfolios, share holdings) -- [ ] Custom alerts and notifications -- [ ] Tax reporting features - -## Security - -- Passwords hashed with bcryptjs -- JWT tokens with 30-day expiration -- Protected routes with session checks -- CORS configured for allowed origins -- SQL injection prevention with parameterized queries -- XSS prevention with proper input sanitization -- HTTPS support (via Coolify proxy) -- Environment variables for sensitive data - -## Contributing - -1. Fork the repository -2. Create a feature branch -3. Commit your changes -4. Push to the branch -5. Create a Pull Request - -## License - -MIT License - see LICENSE file for details - -## Support - -For issues or questions: -- Open an issue on Gitea -- Check the documentation -- Contact the maintainers - ---- - -**Status:** ✅ Production Ready -**Version:** 1.0.0 -**Last Updated:** 2026-02-15 +See `docs/REBUILD_DECISIONS.md` for the detailed rationale and tradeoffs behind each major design choice. diff --git a/backend/Dockerfile b/backend/Dockerfile index 3bddc71..a997db7 100644 --- a/backend/Dockerfile +++ b/backend/Dockerfile @@ -1,14 +1,12 @@ -FROM node:20-alpine AS base +FROM node:20-alpine + WORKDIR /app -# Install Bun and update npm RUN npm install -g bun && npm install -g npm@latest -# Install dependencies -COPY package.json bun.lockb* ./ -RUN bun install +COPY package.json bun.lock* ./ +RUN bun install --frozen-lockfile || bun install -# Copy source code COPY . . 
ENV NODE_ENV=production @@ -16,5 +14,4 @@ ENV PORT=3001 EXPOSE 3001 -# Run directly from TypeScript source (Bun can execute TypeScript directly) CMD ["bun", "run", "src/index.ts"] diff --git a/backend/docker-compose.yml b/backend/docker-compose.yml index 53f74b9..3ccc4cb 100644 --- a/backend/docker-compose.yml +++ b/backend/docker-compose.yml @@ -1,43 +1,64 @@ services: + postgres: + image: postgres:16-alpine + restart: unless-stopped + environment: + POSTGRES_USER: ${POSTGRES_USER:-postgres} + POSTGRES_PASSWORD: ${POSTGRES_PASSWORD:-postgres} + POSTGRES_DB: ${POSTGRES_DB:-fiscal} + volumes: + - postgres_data:/var/lib/postgresql/data + healthcheck: + test: ['CMD-SHELL', 'pg_isready -U ${POSTGRES_USER:-postgres} -d ${POSTGRES_DB:-fiscal}'] + interval: 5s + timeout: 5s + retries: 10 + backend: build: context: . dockerfile: Dockerfile restart: unless-stopped + command: ['sh', '-c', 'bun run src/db/migrate.ts && bun run src/index.ts'] environment: - - DATABASE_URL=postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@postgres:5432/${POSTGRES_DB} - - PORT=3001 + DATABASE_URL: ${DATABASE_URL:-postgres://postgres:postgres@postgres:5432/fiscal} + PORT: ${PORT:-3001} + FRONTEND_URL: ${FRONTEND_URL:-http://localhost:3000} + BETTER_AUTH_SECRET: ${BETTER_AUTH_SECRET:-local-dev-better-auth-secret-change-me} + BETTER_AUTH_BASE_URL: ${BETTER_AUTH_BASE_URL:-http://localhost:3001} + SEC_USER_AGENT: ${SEC_USER_AGENT:-Fiscal Clone } + OPENCLAW_BASE_URL: ${OPENCLAW_BASE_URL:-} + OPENCLAW_API_KEY: ${OPENCLAW_API_KEY:-} + OPENCLAW_MODEL: ${OPENCLAW_MODEL:-zeroclaw} + TASK_HEARTBEAT_SECONDS: ${TASK_HEARTBEAT_SECONDS:-15} + TASK_STALE_SECONDS: ${TASK_STALE_SECONDS:-120} + TASK_MAX_ATTEMPTS: ${TASK_MAX_ATTEMPTS:-3} depends_on: postgres: condition: service_healthy - healthcheck: - test: ["CMD", "curl", "-f", "http://localhost:3001/api/health"] - interval: 30s - timeout: 10s - retries: 3 - networks: - - fiscal - postgres: - image: postgres:16-alpine + worker: + build: + context: . 
+ dockerfile: Dockerfile restart: unless-stopped + command: ['sh', '-c', 'bun run src/db/migrate.ts && bun run src/worker.ts'] environment: - - POSTGRES_USER=${POSTGRES_USER} - - POSTGRES_PASSWORD=${POSTGRES_PASSWORD} - - POSTGRES_DB=${POSTGRES_DB} - volumes: - - postgres_data:/var/lib/postgresql/data - healthcheck: - test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER} -d ${POSTGRES_DB}"] - interval: 5s - timeout: 5s - retries: 10 - networks: - - fiscal + DATABASE_URL: ${DATABASE_URL:-postgres://postgres:postgres@postgres:5432/fiscal} + PORT: ${PORT:-3001} + FRONTEND_URL: ${FRONTEND_URL:-http://localhost:3000} + BETTER_AUTH_SECRET: ${BETTER_AUTH_SECRET:-local-dev-better-auth-secret-change-me} + BETTER_AUTH_BASE_URL: ${BETTER_AUTH_BASE_URL:-http://localhost:3001} + SEC_USER_AGENT: ${SEC_USER_AGENT:-Fiscal Clone } + OPENCLAW_BASE_URL: ${OPENCLAW_BASE_URL:-} + OPENCLAW_API_KEY: ${OPENCLAW_API_KEY:-} + OPENCLAW_MODEL: ${OPENCLAW_MODEL:-zeroclaw} + TASK_HEARTBEAT_SECONDS: ${TASK_HEARTBEAT_SECONDS:-15} + TASK_STALE_SECONDS: ${TASK_STALE_SECONDS:-120} + TASK_MAX_ATTEMPTS: ${TASK_MAX_ATTEMPTS:-3} + depends_on: + postgres: + condition: service_healthy volumes: postgres_data: - -networks: - fiscal: - external: true diff --git a/backend/package.json b/backend/package.json index 8ad333d..162a0bd 100644 --- a/backend/package.json +++ b/backend/package.json @@ -1,28 +1,26 @@ { "name": "fiscal-backend", - "version": "0.1.0", + "version": "2.0.0", + "private": true, "scripts": { "dev": "bun run --watch src/index.ts", + "dev:worker": "bun run --watch src/worker.ts", "start": "bun run src/index.ts", - "db:migrate": "bun run src/db/migrate.ts", - "db:seed": "bun run src/db/seed.ts" + "start:worker": "bun run src/worker.ts", + "db:migrate": "bun run src/db/migrate.ts" }, "dependencies": { "@elysiajs/cors": "^1.4.1", "@elysiajs/swagger": "^1.3.1", - "bcryptjs": "^3.0.3", "better-auth": "^1.4.18", "dotenv": "^17.3.1", "elysia": "^1.4.25", - "jsonwebtoken": "^9.0.3", "pg": "^8.18.0", 
"postgres": "^3.4.8", "zod": "^4.3.6" }, "devDependencies": { "@types/pg": "^8.16.0", - "@types/bcryptjs": "^3.0.0", - "@types/jsonwebtoken": "^9.0.10", "bun-types": "latest" } } diff --git a/backend/src/auth.ts b/backend/src/auth.ts index bad2327..e8204fc 100644 --- a/backend/src/auth.ts +++ b/backend/src/auth.ts @@ -1,34 +1,38 @@ -import { betterAuth } from "better-auth"; -import { Pool } from "pg"; +import { betterAuth } from 'better-auth'; +import { Pool } from 'pg'; +import { env } from './config'; -const defaultDatabaseUrl = `postgres://${process.env.POSTGRES_USER || 'postgres'}:${process.env.POSTGRES_PASSWORD || 'postgres'}@${process.env.POSTGRES_HOST || 'localhost'}:5432/${process.env.POSTGRES_DB || 'fiscal'}`; -const defaultFrontendUrl = process.env.FRONTEND_URL || 'http://localhost:3000'; -const trustedOrigins = defaultFrontendUrl - .split(',') - .map((origin) => origin.trim()) - .filter(Boolean); +const pool = new Pool({ + connectionString: env.DATABASE_URL, + max: 20, + idleTimeoutMillis: 30_000 +}); export const auth = betterAuth({ - database: new Pool({ - connectionString: process.env.DATABASE_URL || defaultDatabaseUrl, - }), - trustedOrigins, + secret: env.BETTER_AUTH_SECRET, + baseURL: env.BETTER_AUTH_BASE_URL, + database: pool, + trustedOrigins: env.FRONTEND_ORIGINS, emailAndPassword: { enabled: true, - autoSignIn: true, + autoSignIn: true }, user: { - modelName: "users", + modelName: 'users', additionalFields: { name: { - type: "string", - required: false, + type: 'string', + required: false }, - }, + image: { + type: 'string', + required: false + } + } }, advanced: { database: { - generateId: false, // Use PostgreSQL serial for users table - }, - }, + generateId: false + } + } }); diff --git a/backend/src/better-auth-migrate.ts b/backend/src/better-auth-migrate.ts deleted file mode 100644 index 9062612..0000000 --- a/backend/src/better-auth-migrate.ts +++ /dev/null @@ -1,109 +0,0 @@ -import { db } from './db'; - -async function 
migrateToBetterAuth() { - console.log('Migrating to Better Auth schema...'); - - try { - // Add Better Auth columns to users table - await db` - ALTER TABLE users - ADD COLUMN IF NOT EXISTS email_verified BOOLEAN DEFAULT FALSE - `; - - await db` - ALTER TABLE users - ADD COLUMN IF NOT EXISTS image TEXT - `; - - console.log('✅ Added Better Auth columns to users table'); - - // Create session table - await db` - CREATE TABLE IF NOT EXISTS session ( - id TEXT PRIMARY KEY, - user_id INTEGER REFERENCES users(id) ON DELETE CASCADE, - token TEXT NOT NULL UNIQUE, - expires_at TIMESTAMP NOT NULL, - ip_address TEXT, - user_agent TEXT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP - ) - `; - - console.log('✅ Created session table'); - - // Create account table - await db` - CREATE TABLE IF NOT EXISTS account ( - id TEXT PRIMARY KEY, - user_id INTEGER REFERENCES users(id) ON DELETE CASCADE, - account_id TEXT NOT NULL, - provider_id TEXT NOT NULL, - access_token TEXT, - refresh_token TEXT, - access_token_expires_at TIMESTAMP, - refresh_token_expires_at TIMESTAMP, - scope TEXT, - id_token TEXT, - password TEXT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - UNIQUE(user_id, provider_id, account_id) - ) - `; - - console.log('✅ Created account table'); - - // Create verification table - await db` - CREATE TABLE IF NOT EXISTS verification ( - id TEXT PRIMARY KEY, - identifier TEXT NOT NULL, - value TEXT NOT NULL, - expires_at TIMESTAMP NOT NULL, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP - ) - `; - - console.log('✅ Created verification table'); - - // Create indexes - await db`CREATE INDEX IF NOT EXISTS idx_session_user_id ON session(user_id)`; - await db`CREATE INDEX IF NOT EXISTS idx_session_token ON session(token)`; - await db`CREATE INDEX IF NOT EXISTS idx_session_expires_at ON session(expires_at)`; - await db`CREATE 
INDEX IF NOT EXISTS idx_account_user_id ON account(user_id)`; - await db`CREATE INDEX IF NOT EXISTS idx_account_provider_id ON account(provider_id)`; - await db`CREATE INDEX IF NOT EXISTS idx_verification_identifier ON verification(identifier)`; - await db`CREATE INDEX IF NOT EXISTS idx_verification_expires_at ON verification(expires_at)`; - - console.log('✅ Created indexes'); - - // Migrate existing users to account table for credential auth - await db` - INSERT INTO account (id, user_id, account_id, provider_id, password, created_at, updated_at) - SELECT - gen_random_uuid(), - id, - id::text, - 'credential', - password, - created_at, - updated_at - FROM users - WHERE password IS NOT NULL - ON CONFLICT DO NOTHING - `; - - console.log('✅ Migrated existing users to account table'); - - console.log('✅ Better Auth migration completed!'); - process.exit(0); - } catch (error) { - console.error('❌ Migration failed:', error); - process.exit(1); - } -} - -migrateToBetterAuth(); diff --git a/backend/src/config.ts b/backend/src/config.ts new file mode 100644 index 0000000..b6d0259 --- /dev/null +++ b/backend/src/config.ts @@ -0,0 +1,47 @@ +import * as dotenv from 'dotenv'; +import { z } from 'zod'; + +dotenv.config(); + +const schema = z.object({ + NODE_ENV: z.enum(['development', 'test', 'production']).default('development'), + PORT: z.coerce.number().int().positive().default(3001), + DATABASE_URL: z.string().optional(), + POSTGRES_USER: z.string().default('postgres'), + POSTGRES_PASSWORD: z.string().default('postgres'), + POSTGRES_HOST: z.string().default('localhost'), + POSTGRES_DB: z.string().default('fiscal'), + FRONTEND_URL: z.string().default('http://localhost:3000'), + BETTER_AUTH_SECRET: z.string().min(16).default('local-dev-better-auth-secret-change-me-1234'), + BETTER_AUTH_BASE_URL: z.string().url().default('http://localhost:3001'), + SEC_USER_AGENT: z.string().default('Fiscal Clone '), + OPENCLAW_BASE_URL: z.preprocess( + (value) => (typeof value === 'string' && 
value.trim() === '' ? undefined : value), + z.string().url().optional() + ), + OPENCLAW_API_KEY: z.preprocess( + (value) => (typeof value === 'string' && value.trim() === '' ? undefined : value), + z.string().optional() + ), + OPENCLAW_MODEL: z.string().default('zeroclaw'), + TASK_HEARTBEAT_SECONDS: z.coerce.number().int().positive().default(15), + TASK_STALE_SECONDS: z.coerce.number().int().positive().default(120), + TASK_MAX_ATTEMPTS: z.coerce.number().int().positive().default(3) +}); + +const parsed = schema.safeParse(process.env); + +if (!parsed.success) { + console.error('Invalid environment configuration', parsed.error.flatten().fieldErrors); + throw new Error('Invalid environment variables'); +} + +const rawEnv = parsed.data; +const databaseUrl = rawEnv.DATABASE_URL + ?? `postgres://${rawEnv.POSTGRES_USER}:${rawEnv.POSTGRES_PASSWORD}@${rawEnv.POSTGRES_HOST}:5432/${rawEnv.POSTGRES_DB}`; + +export const env = { + ...rawEnv, + DATABASE_URL: databaseUrl, + FRONTEND_ORIGINS: rawEnv.FRONTEND_URL.split(',').map((origin) => origin.trim()).filter(Boolean) +}; diff --git a/backend/src/db/index.ts b/backend/src/db/index.ts index 27a1f47..8f60560 100644 --- a/backend/src/db/index.ts +++ b/backend/src/db/index.ts @@ -1,47 +1,13 @@ import postgres from 'postgres'; +import { env } from '../config'; -const defaultDatabaseUrl = `postgres://${process.env.POSTGRES_USER || 'postgres'}:${process.env.POSTGRES_PASSWORD || 'postgres'}@${process.env.POSTGRES_HOST || 'localhost'}:5432/${process.env.POSTGRES_DB || 'fiscal'}`; - -const sql = postgres(process.env.DATABASE_URL || defaultDatabaseUrl, { - max: 10, +export const db = postgres(env.DATABASE_URL, { + max: 20, idle_timeout: 20, - connect_timeout: 10 + connect_timeout: 10, + prepare: true }); -export const db = sql; - -export type Filings = { - id: number; - ticker: string; - filing_type: string; - filing_date: Date; - accession_number: string; - cik: string; - company_name: string; - key_metrics?: any; - insights?: string; - 
created_at: Date; -}; - -export type Portfolio = { - id: number; - user_id: string; - ticker: string; - shares: number; - avg_cost: number; - current_price?: number; - current_value?: number; - gain_loss?: number; - gain_loss_pct?: number; - last_updated?: Date; - created_at: Date; -}; - -export type Watchlist = { - id: number; - user_id: string; - ticker: string; - company_name: string; - sector?: string; - created_at: Date; -}; +export async function closeDb() { + await db.end({ timeout: 5 }); +} diff --git a/backend/src/db/migrate.ts b/backend/src/db/migrate.ts index 1b69cb6..ab8e85f 100644 --- a/backend/src/db/migrate.ts +++ b/backend/src/db/migrate.ts @@ -1,107 +1,256 @@ import { db } from './index'; async function migrate() { - console.log('Running migrations...'); + console.log('Running database migrations...'); + + await db`CREATE EXTENSION IF NOT EXISTS pgcrypto`; - // Create users table await db` CREATE TABLE IF NOT EXISTS users ( id SERIAL PRIMARY KEY, - email VARCHAR(255) UNIQUE NOT NULL, - password TEXT NOT NULL, - name VARCHAR(255), - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - updated_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + email TEXT UNIQUE NOT NULL, + email_verified BOOLEAN NOT NULL DEFAULT FALSE, + name TEXT, + image TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ) + `; + + await db`ALTER TABLE users ADD COLUMN IF NOT EXISTS email_verified BOOLEAN NOT NULL DEFAULT FALSE`; + await db`ALTER TABLE users ADD COLUMN IF NOT EXISTS image TEXT`; + + await db` + DO $$ + BEGIN + IF EXISTS ( + SELECT 1 + FROM information_schema.columns + WHERE table_name = 'users' + AND column_name = 'password' + ) THEN + EXECUTE 'ALTER TABLE users ALTER COLUMN password DROP NOT NULL'; + END IF; + END + $$ + `; + + await db` + CREATE TABLE IF NOT EXISTS session ( + id TEXT PRIMARY KEY, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + token TEXT NOT NULL UNIQUE, + expires_at TIMESTAMPTZ NOT 
NULL, + ip_address TEXT, + user_agent TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ) + `; + + await db` + CREATE TABLE IF NOT EXISTS account ( + id TEXT PRIMARY KEY, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + account_id TEXT NOT NULL, + provider_id TEXT NOT NULL, + access_token TEXT, + refresh_token TEXT, + access_token_expires_at TIMESTAMPTZ, + refresh_token_expires_at TIMESTAMPTZ, + scope TEXT, + id_token TEXT, + password TEXT, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + UNIQUE(user_id, provider_id, account_id) + ) + `; + + await db` + CREATE TABLE IF NOT EXISTS verification ( + id TEXT PRIMARY KEY, + identifier TEXT NOT NULL, + value TEXT NOT NULL, + expires_at TIMESTAMPTZ NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() ) `; - // Create filings table await db` CREATE TABLE IF NOT EXISTS filings ( - id SERIAL PRIMARY KEY, - ticker VARCHAR(10) NOT NULL, + id BIGSERIAL PRIMARY KEY, + ticker VARCHAR(12) NOT NULL, filing_type VARCHAR(20) NOT NULL, filing_date DATE NOT NULL, - accession_number VARCHAR(40) UNIQUE NOT NULL, + accession_number VARCHAR(40) NOT NULL UNIQUE, cik VARCHAR(20) NOT NULL, company_name TEXT NOT NULL, - key_metrics JSONB, - insights TEXT, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP + filing_url TEXT, + metrics JSONB, + analysis JSONB, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW() ) `; - // Create portfolio table + await db`ALTER TABLE filings ADD COLUMN IF NOT EXISTS filing_url TEXT`; + await db`ALTER TABLE filings ADD COLUMN IF NOT EXISTS metrics JSONB`; + await db`ALTER TABLE filings ADD COLUMN IF NOT EXISTS analysis JSONB`; + await db`ALTER TABLE filings ADD COLUMN IF NOT EXISTS updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW()`; + await db` - CREATE TABLE IF NOT EXISTS portfolio ( - id SERIAL 
PRIMARY KEY, - user_id INTEGER REFERENCES users(id) ON DELETE CASCADE, - ticker VARCHAR(10) NOT NULL, - shares NUMERIC(20, 4) NOT NULL, - avg_cost NUMERIC(10, 4) NOT NULL, - current_price NUMERIC(10, 4), - current_value NUMERIC(20, 4), - gain_loss NUMERIC(20, 4), - gain_loss_pct NUMERIC(10, 4), - last_updated TIMESTAMP, - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, - UNIQUE(user_id, ticker) - ) + DO $$ + BEGIN + IF EXISTS ( + SELECT 1 + FROM information_schema.columns + WHERE table_name = 'filings' + AND column_name = 'key_metrics' + ) THEN + EXECUTE 'UPDATE filings SET metrics = COALESCE(metrics, key_metrics) WHERE metrics IS NULL'; + END IF; + + IF EXISTS ( + SELECT 1 + FROM information_schema.columns + WHERE table_name = 'filings' + AND column_name = 'insights' + ) THEN + EXECUTE $migrate$ + UPDATE filings + SET analysis = COALESCE(analysis, jsonb_build_object('legacyInsights', insights)) + WHERE analysis IS NULL + AND insights IS NOT NULL + $migrate$; + END IF; + END + $$ `; - // Create watchlist table await db` CREATE TABLE IF NOT EXISTS watchlist ( - id SERIAL PRIMARY KEY, - user_id INTEGER REFERENCES users(id) ON DELETE CASCADE, - ticker VARCHAR(10) NOT NULL, + id BIGSERIAL PRIMARY KEY, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + ticker VARCHAR(12) NOT NULL, company_name TEXT NOT NULL, - sector VARCHAR(100), - created_at TIMESTAMP DEFAULT CURRENT_TIMESTAMP, + sector VARCHAR(120), + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), UNIQUE(user_id, ticker) ) `; - // Create indexes - await db`CREATE INDEX IF NOT EXISTS idx_users_email ON users(email)`; - await db`CREATE INDEX IF NOT EXISTS idx_filings_ticker ON filings(ticker)`; - await db`CREATE INDEX IF NOT EXISTS idx_filings_date ON filings(filing_date DESC)`; - await db`CREATE INDEX IF NOT EXISTS idx_portfolio_user ON portfolio(user_id)`; - await db`CREATE INDEX IF NOT EXISTS idx_watchlist_user ON watchlist(user_id)`; - - // Create function to update portfolio prices await db` - 
CREATE OR REPLACE FUNCTION update_portfolio_prices() - RETURNS TRIGGER AS $$ + CREATE TABLE IF NOT EXISTS holdings ( + id BIGSERIAL PRIMARY KEY, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + ticker VARCHAR(12) NOT NULL, + shares NUMERIC(20, 4) NOT NULL, + avg_cost NUMERIC(12, 4) NOT NULL, + current_price NUMERIC(12, 4), + market_value NUMERIC(20, 4) GENERATED ALWAYS AS ((COALESCE(current_price, avg_cost) * shares)) STORED, + gain_loss NUMERIC(20, 4) GENERATED ALWAYS AS (((COALESCE(current_price, avg_cost) - avg_cost) * shares)) STORED, + gain_loss_pct NUMERIC(12, 4) GENERATED ALWAYS AS ( + CASE + WHEN avg_cost > 0 THEN (((COALESCE(current_price, avg_cost) - avg_cost) / avg_cost) * 100) + ELSE 0 + END + ) STORED, + last_price_at TIMESTAMPTZ, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + UNIQUE(user_id, ticker) + ) + `; + + await db` + DO $$ BEGIN - NEW.current_value := NEW.shares * NEW.current_price; - NEW.gain_loss := NEW.current_value - (NEW.shares * NEW.avg_cost); - NEW.gain_loss_pct := CASE - WHEN NEW.avg_cost > 0 THEN ((NEW.current_price - NEW.avg_cost) / NEW.avg_cost) * 100 - ELSE 0 - END; - NEW.last_updated := NOW(); - RETURN NEW; - END; - $$ LANGUAGE plpgsql; + IF EXISTS ( + SELECT 1 + FROM information_schema.tables + WHERE table_name = 'portfolio' + ) THEN + EXECUTE $migrate$ + INSERT INTO holdings ( + user_id, + ticker, + shares, + avg_cost, + current_price, + last_price_at, + created_at, + updated_at + ) + SELECT + user_id, + ticker, + shares, + avg_cost, + current_price, + last_updated, + created_at, + NOW() + FROM portfolio + ON CONFLICT (user_id, ticker) DO NOTHING + $migrate$; + END IF; + END + $$ `; - // Create trigger await db` - DROP TRIGGER IF EXISTS update_portfolio_prices_trigger ON portfolio - `; - await db` - CREATE TRIGGER update_portfolio_prices_trigger - BEFORE INSERT OR UPDATE ON portfolio - FOR EACH ROW - EXECUTE FUNCTION update_portfolio_prices() + CREATE TABLE 
IF NOT EXISTS portfolio_insights ( + id BIGSERIAL PRIMARY KEY, + user_id INTEGER NOT NULL REFERENCES users(id) ON DELETE CASCADE, + provider TEXT NOT NULL, + model TEXT NOT NULL, + content TEXT NOT NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW() + ) `; - console.log('✅ Migrations completed!'); - process.exit(0); + await db` + CREATE TABLE IF NOT EXISTS long_tasks ( + id UUID PRIMARY KEY DEFAULT gen_random_uuid(), + task_type TEXT NOT NULL, + status TEXT NOT NULL, + priority INTEGER NOT NULL DEFAULT 50, + payload JSONB NOT NULL, + result JSONB, + error TEXT, + attempts INTEGER NOT NULL DEFAULT 0, + max_attempts INTEGER NOT NULL DEFAULT 3, + scheduled_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + started_at TIMESTAMPTZ, + heartbeat_at TIMESTAMPTZ, + finished_at TIMESTAMPTZ, + created_by INTEGER REFERENCES users(id) ON DELETE SET NULL, + created_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + updated_at TIMESTAMPTZ NOT NULL DEFAULT NOW(), + CONSTRAINT long_tasks_status_check CHECK (status IN ('queued', 'running', 'completed', 'failed')) + ) + `; + + await db`CREATE INDEX IF NOT EXISTS idx_users_email ON users(email)`; + await db`CREATE INDEX IF NOT EXISTS idx_session_token ON session(token)`; + await db`CREATE INDEX IF NOT EXISTS idx_session_user ON session(user_id)`; + await db`CREATE INDEX IF NOT EXISTS idx_account_user ON account(user_id)`; + await db`CREATE INDEX IF NOT EXISTS idx_watchlist_user ON watchlist(user_id)`; + await db`CREATE INDEX IF NOT EXISTS idx_holdings_user ON holdings(user_id)`; + await db`CREATE INDEX IF NOT EXISTS idx_filings_ticker_date ON filings(ticker, filing_date DESC)`; + await db`CREATE INDEX IF NOT EXISTS idx_filings_accession ON filings(accession_number)`; + await db`CREATE INDEX IF NOT EXISTS idx_portfolio_insights_user ON portfolio_insights(user_id, created_at DESC)`; + await db`CREATE INDEX IF NOT EXISTS idx_long_tasks_status_sched ON long_tasks(status, scheduled_at, priority DESC, created_at)`; + await db`CREATE INDEX IF NOT EXISTS 
idx_long_tasks_user ON long_tasks(created_by, created_at DESC)`; + + console.log('Migrations completed successfully.'); } -migrate().catch(error => { - console.error('❌ Migration failed:', error); - process.exit(1); -}); +migrate() + .then(() => process.exit(0)) + .catch((error) => { + console.error('Migration failed', error); + process.exit(1); + }); diff --git a/backend/src/index.ts b/backend/src/index.ts index 25fec57..ac4cad2 100644 --- a/backend/src/index.ts +++ b/backend/src/index.ts @@ -1,51 +1,58 @@ import { Elysia } from 'elysia'; import { cors } from '@elysiajs/cors'; import { swagger } from '@elysiajs/swagger'; -import * as dotenv from 'dotenv'; - -dotenv.config(); - +import { env } from './config'; import { db } from './db'; -import { filingsRoutes } from './routes/filings'; -import { portfolioRoutes } from './routes/portfolio'; -import { openclawRoutes } from './routes/openclaw'; -import { watchlistRoutes } from './routes/watchlist'; import { betterAuthRoutes } from './routes/better-auth'; +import { filingsRoutes } from './routes/filings'; +import { meRoutes } from './routes/me'; +import { openclawRoutes } from './routes/openclaw'; +import { portfolioRoutes } from './routes/portfolio'; +import { taskRoutes } from './routes/tasks'; +import { watchlistRoutes } from './routes/watchlist'; -const frontendOrigin = process.env.FRONTEND_URL || 'http://localhost:3000'; - -const app = new Elysia({ - prefix: '/api' -}) +const app = new Elysia({ prefix: '/api' }) .use(cors({ - origin: frontendOrigin, + origin: env.FRONTEND_ORIGINS, credentials: true, - methods: ['GET', 'POST', 'PUT', 'DELETE', 'OPTIONS'] + allowedHeaders: ['Content-Type', 'Authorization'], + methods: ['GET', 'POST', 'PUT', 'PATCH', 'DELETE', 'OPTIONS'] })) .use(swagger({ documentation: { info: { title: 'Fiscal Clone API', - version: '1.0.0', - description: 'Financial filings and portfolio analytics API' + version: '2.0.0', + description: 'Futuristic fiscal intelligence API with durable jobs and 
OpenClaw integration.' } } })) .use(betterAuthRoutes) - .use(filingsRoutes) - .use(portfolioRoutes) + .use(meRoutes) .use(watchlistRoutes) + .use(portfolioRoutes) + .use(filingsRoutes) .use(openclawRoutes) + .use(taskRoutes) + .get('/health', async () => { + const queueRows = await db` + SELECT status, COUNT(*)::int AS count + FROM long_tasks + GROUP BY status + `; - // Health check - .get('/health', () => ({ - status: 'ok', - timestamp: new Date().toISOString(), - version: '1.0.0', - database: 'connected' - })) + return { + status: 'ok', + version: '2.0.0', + timestamp: new Date().toISOString(), + queue: queueRows.reduce<Record<string, number>>((acc, row) => { + acc[row.status] = row.count; + return acc; + }, {}) + }; + }); - .listen(process.env.PORT || 3001); +app.listen(env.PORT); -console.log(`🚀 Backend running on http://localhost:${app.server?.port}`); -console.log(`📚 Swagger docs: http://localhost:${app.server?.port}/swagger`); +console.log(`Fiscal backend listening on http://localhost:${app.server?.port}`); +console.log(`Swagger docs: http://localhost:${app.server?.port}/swagger`); diff --git a/backend/src/routes/auth.ts b/backend/src/routes/auth.ts deleted file mode 100644 index d1a68ce..0000000 --- a/backend/src/routes/auth.ts +++ /dev/null @@ -1,122 +0,0 @@ -import { Elysia, t } from 'elysia'; -import * as bcrypt from 'bcryptjs'; -import jwt from 'jsonwebtoken'; -import { db } from '../db'; - -const JWT_SECRET = process.env.JWT_SECRET || 'your-secret-key-change-in-production'; - -export const authRoutes = new Elysia({ prefix: '/auth' }) - /** - * Register new user - */ - .post('/register', async ({ body }) => { - const { email, password, name } = body; - - // Check if user exists - const existing = await db` - SELECT id FROM users WHERE email = ${email} - `; - - if (existing.length > 0) { - return { error: 'User already exists' }; - } - - // Hash password - const hashedPassword = await bcrypt.hash(password, 10); - - // Create user - const result = await db` - INSERT INTO users 
${db({ email, password: hashedPassword, name })} - RETURNING id, email, name - `; - - const user = result[0]; - - // Generate JWT - const token = jwt.sign( - { id: user.id, email: user.email }, - JWT_SECRET, - { expiresIn: '30d' } - ); - - return { - success: true, - user: { id: user.id, email: user.email, name: user.name }, - token - }; - }, { - body: t.Object({ - email: t.String({ format: 'email' }), - password: t.String({ minLength: 8 }), - name: t.String() - }) - }) - - /** - * Login - */ - .post('/login', async ({ body }) => { - const { email, password } = body; - - // Find user - const users = await db` - SELECT * FROM users WHERE email = ${email} - `; - - if (users.length === 0) { - return { error: 'Invalid credentials' }; - } - - const user = users[0]; - - // Verify password - const validPassword = await bcrypt.compare(password, user.password); - - if (!validPassword) { - return { error: 'Invalid credentials' }; - } - - // Generate JWT - const token = jwt.sign( - { id: user.id, email: user.email }, - JWT_SECRET, - { expiresIn: '30d' } - ); - - return { - success: true, - user: { id: user.id, email: user.email, name: user.name }, - token - }; - }, { - body: t.Object({ - email: t.String({ format: 'email' }), - password: t.String() - }) - }) - - /** - * Verify token (for NextAuth credentials provider) - */ - .post('/verify', async ({ body, set }) => { - try { - const decoded = jwt.verify(body.token, JWT_SECRET) as any; - - if (!decoded.id || !decoded.email) { - set.status = 401; - return { error: 'Invalid token' }; - } - - return { - success: true, - user: { id: decoded.id, email: decoded.email } - }; - } catch (error) { - set.status = 401; - return { error: 'Invalid token' }; - } - }, { - body: t.Object({ - token: t.String() - }) - }); diff --git a/backend/src/routes/better-auth.ts b/backend/src/routes/better-auth.ts index f3944f0..2b2caae 100644 --- a/backend/src/routes/better-auth.ts +++ b/backend/src/routes/better-auth.ts @@ -1,7 +1,7 @@ import { Elysia } 
from 'elysia'; import { auth } from '../auth'; -export const betterAuthRoutes = new Elysia() - .all('/auth/*', async ({ request }) => { - return auth.handler(request); +export const betterAuthRoutes = new Elysia({ prefix: '/auth' }) + .all('/*', async ({ request }) => { + return await auth.handler(request); }); diff --git a/backend/src/routes/error.ts b/backend/src/routes/error.ts new file mode 100644 index 0000000..16309d2 --- /dev/null +++ b/backend/src/routes/error.ts @@ -0,0 +1,16 @@ +import { UnauthorizedError } from '../session'; + +export function toHttpError(set: { status: number }, error: unknown) { + if (error instanceof UnauthorizedError) { + set.status = 401; + return { error: error.message }; + } + + if (error instanceof Error) { + set.status = 500; + return { error: error.message }; + } + + set.status = 500; + return { error: 'Unexpected error' }; +} diff --git a/backend/src/routes/filings.ts b/backend/src/routes/filings.ts index 1074a49..d7b03d2 100644 --- a/backend/src/routes/filings.ts +++ b/backend/src/routes/filings.ts @@ -1,47 +1,107 @@ import { Elysia, t } from 'elysia'; -import { SECScraper } from '../services/sec'; import { db } from '../db'; - -const sec = new SECScraper(); +import { requireSessionUser } from '../session'; +import { enqueueTask } from '../tasks/repository'; +import { toHttpError } from './error'; export const filingsRoutes = new Elysia({ prefix: '/filings' }) - .get('/', async () => { - const filings = await db` - SELECT * FROM filings - ORDER BY filing_date DESC - LIMIT 100 - `; - return filings; - }) + .get('/', async ({ request, set, query }) => { + try { + await requireSessionUser(request); + const tickerFilter = query.ticker?.trim().toUpperCase(); + const limit = Number(query.limit ?? 50); + const safeLimit = Number.isFinite(limit) ? 
Math.min(Math.max(limit, 1), 200) : 50; - .get('/:ticker', async ({ params }) => { - const filings = await db` - SELECT * FROM filings - WHERE ticker = ${params.ticker.toUpperCase()} - ORDER BY filing_date DESC - LIMIT 50 - `; - return filings; - }) + const rows = tickerFilter + ? await db` + SELECT * + FROM filings + WHERE ticker = ${tickerFilter} + ORDER BY filing_date DESC, created_at DESC + LIMIT ${safeLimit} + ` + : await db` + SELECT * + FROM filings + ORDER BY filing_date DESC, created_at DESC + LIMIT ${safeLimit} + `; - .get('/details/:accessionNumber', async ({ params }) => { - const details = await db` - SELECT * FROM filings - WHERE accession_number = ${params.accessionNumber} - `; - return details[0] || null; - }) - - .post('/refresh/:ticker', async ({ params }) => { - const newFilings = await sec.searchFilings(params.ticker, 5); - - for (const filing of newFilings) { - const metrics = await sec['extractKeyMetrics'](filing); - await db` - INSERT INTO filings ${db(filing, metrics)} - ON CONFLICT (accession_number) DO NOTHING - `; + return { filings: rows }; + } catch (error) { + return toHttpError(set, error); } + }, { + query: t.Object({ + ticker: t.Optional(t.String()), + limit: t.Optional(t.Numeric()) + }) + }) + .get('/:accessionNumber', async ({ request, set, params }) => { + try { + await requireSessionUser(request); + const rows = await db` + SELECT * + FROM filings + WHERE accession_number = ${params.accessionNumber} + LIMIT 1 + `; - return { success: true, count: newFilings.length }; + if (!rows[0]) { + set.status = 404; + return { error: 'Filing not found' }; + } + + return { filing: rows[0] }; + } catch (error) { + return toHttpError(set, error); + } + }, { + params: t.Object({ + accessionNumber: t.String({ minLength: 8 }) + }) + }) + .post('/sync', async ({ request, set, body }) => { + try { + const user = await requireSessionUser(request); + const task = await enqueueTask({ + taskType: 'sync_filings', + payload: { + ticker: 
body.ticker.trim().toUpperCase(), + limit: body.limit ?? 20 + }, + createdBy: user.id, + priority: 90 + }); + + return { task }; + } catch (error) { + return toHttpError(set, error); + } + }, { + body: t.Object({ + ticker: t.String({ minLength: 1, maxLength: 12 }), + limit: t.Optional(t.Number({ minimum: 1, maximum: 50 })) + }) + }) + .post('/:accessionNumber/analyze', async ({ request, set, params }) => { + try { + const user = await requireSessionUser(request); + const task = await enqueueTask({ + taskType: 'analyze_filing', + payload: { + accessionNumber: params.accessionNumber + }, + createdBy: user.id, + priority: 65 + }); + + return { task }; + } catch (error) { + return toHttpError(set, error); + } + }, { + params: t.Object({ + accessionNumber: t.String({ minLength: 8 }) + }) }); diff --git a/backend/src/routes/me.ts b/backend/src/routes/me.ts new file mode 100644 index 0000000..097bb0c --- /dev/null +++ b/backend/src/routes/me.ts @@ -0,0 +1,13 @@ +import { Elysia } from 'elysia'; +import { requireSessionUser } from '../session'; +import { toHttpError } from './error'; + +export const meRoutes = new Elysia({ prefix: '/me' }) + .get('/', async ({ request, set }) => { + try { + const user = await requireSessionUser(request); + return { user }; + } catch (error) { + return toHttpError(set, error); + } + }); diff --git a/backend/src/routes/openclaw.ts b/backend/src/routes/openclaw.ts index 318815b..b9efc75 100644 --- a/backend/src/routes/openclaw.ts +++ b/backend/src/routes/openclaw.ts @@ -1,121 +1,53 @@ import { Elysia, t } from 'elysia'; -import { db } from '../db'; +import { env } from '../config'; +import { requireSessionUser } from '../session'; +import { enqueueTask } from '../tasks/repository'; +import { toHttpError } from './error'; -interface OpenClawMessage { - text: string; - channelId?: string; -} - -export const openclawRoutes = new Elysia({ prefix: '/openclaw' }) - /** - * Trigger Discord notification for new filing - */ - .post('/notify/filing', 
async ({ body }) => { - // This endpoint can be called by cron jobs or external webhooks - // to send Discord notifications about new filings - - const message = `📄 **New SEC Filing** - -**Ticker:** ${body.ticker} -**Type:** ${body.filingType} -**Date:** ${body.filingDate} - -View details: ${body.url}`; - - // In production, this would send to Discord via webhook - // For now, we just log it - console.log('[DISCORD]', message); - - return { success: true, message }; - }, { - body: t.Object({ - ticker: t.String(), - filingType: t.String(), - filingDate: t.String(), - url: t.String() - }) - }) - - /** - * Get AI insights for portfolio - */ - .post('/insights/portfolio', async ({ body }) => { - const holdings = await db` - SELECT * FROM portfolio - WHERE user_id = ${body.userId} - `; - - // Generate AI analysis - const prompt = ` -Analyze this portfolio: - -${JSON.stringify(holdings, null, 2)} - -Provide: -1. Overall portfolio health assessment -2. Risk analysis -3. Top 3 recommendations -4. Any concerning patterns - `; - - // This would call OpenClaw's AI - // For now, return placeholder - return { - health: 'moderate', - risk: 'medium', - recommendations: [ - 'Consider diversifying sector exposure', - 'Review underperforming positions', - 'Rebalance portfolio' - ], - analysis: 'Portfolio shows mixed performance with some concentration risk.' - }; - }, { - body: t.Object({ - userId: t.String() - }) - }) - - /** - * Get AI insights for a specific filing - */ - .post('/insights/filing', async ({ body }) => { - const filing = await db` - SELECT * FROM filings - WHERE accession_number = ${body.accessionNumber} - `; - - if (!filing) { - return { error: 'Filing not found' }; +export const openclawRoutes = new Elysia({ prefix: '/ai' }) + .get('/status', async ({ request, set }) => { + try { + await requireSessionUser(request); + return { + configured: Boolean(env.OPENCLAW_BASE_URL && env.OPENCLAW_API_KEY), + baseUrl: env.OPENCLAW_BASE_URL ?? 
null, + model: env.OPENCLAW_MODEL + }; + } catch (error) { + return toHttpError(set, error); } + }) + .post('/portfolio-insights', async ({ request, set }) => { + try { + const user = await requireSessionUser(request); + const task = await enqueueTask({ + taskType: 'portfolio_insights', + payload: { userId: user.id }, + createdBy: user.id, + priority: 70 + }); - const prompt = ` -Analyze this SEC filing: + return { task }; + } catch (error) { + return toHttpError(set, error); + } + }) + .post('/filing-insights', async ({ request, set, body }) => { + try { + const user = await requireSessionUser(request); + const task = await enqueueTask({ + taskType: 'analyze_filing', + payload: { accessionNumber: body.accessionNumber }, + createdBy: user.id, + priority: 65 + }); -**Company:** ${filing.company_name} -**Ticker:** ${filing.ticker} -**Type:** ${filing.filing_type} -**Date:** ${filing.filing_date} - -**Key Metrics:** -${JSON.stringify(filing.key_metrics, null, 2)} - -Provide key insights and any red flags. 
- `; - - // Store insights - await db` - UPDATE filings - SET insights = ${prompt} - WHERE accession_number = ${body.accessionNumber} - `; - - return { - insights: 'Analysis saved', - filing - }; + return { task }; + } catch (error) { + return toHttpError(set, error); + } }, { body: t.Object({ - accessionNumber: t.String() + accessionNumber: t.String({ minLength: 8 }) }) }); diff --git a/backend/src/routes/portfolio.ts b/backend/src/routes/portfolio.ts index 8a7e820..e46b9b8 100644 --- a/backend/src/routes/portfolio.ts +++ b/backend/src/routes/portfolio.ts @@ -1,65 +1,197 @@ import { Elysia, t } from 'elysia'; -import { db, type Portfolio } from '../db'; +import { db } from '../db'; +import { requireSessionUser } from '../session'; +import { enqueueTask } from '../tasks/repository'; +import { toHttpError } from './error'; export const portfolioRoutes = new Elysia({ prefix: '/portfolio' }) - .get('/:userId', async ({ params }) => { - const holdings = await db` - SELECT * FROM portfolio - WHERE user_id = ${params.userId} - ORDER BY ticker - `; + .get('/holdings', async ({ request, set }) => { + try { + const user = await requireSessionUser(request); + const holdings = await db` + SELECT + id, + user_id, + ticker, + shares, + avg_cost, + current_price, + market_value, + gain_loss, + gain_loss_pct, + last_price_at, + created_at, + updated_at + FROM holdings + WHERE user_id = ${user.id} + ORDER BY market_value DESC, ticker ASC + `; - return holdings; + return { holdings }; + } catch (error) { + return toHttpError(set, error); + } }) + .post('/holdings', async ({ request, set, body }) => { + try { + const user = await requireSessionUser(request); + const ticker = body.ticker.trim().toUpperCase(); - .post('/', async ({ body }) => { - const result = await db` - INSERT INTO portfolio ${db(body as Portfolio)} - ON CONFLICT (user_id, ticker) - DO UPDATE SET - shares = EXCLUDED.shares, - avg_cost = EXCLUDED.avg_cost, - current_price = EXCLUDED.current_price - RETURNING * - `; 
+ const rows = await db` + INSERT INTO holdings ( + user_id, + ticker, + shares, + avg_cost, + current_price + ) VALUES ( + ${user.id}, + ${ticker}, + ${body.shares}, + ${body.avgCost}, + ${body.currentPrice ?? null} + ) + ON CONFLICT (user_id, ticker) + DO UPDATE SET + shares = EXCLUDED.shares, + avg_cost = EXCLUDED.avg_cost, + current_price = COALESCE(EXCLUDED.current_price, holdings.current_price), + updated_at = NOW() + RETURNING * + `; - return result[0]; + return { holding: rows[0] }; + } catch (error) { + return toHttpError(set, error); + } }, { body: t.Object({ - user_id: t.String(), - ticker: t.String(), - shares: t.Number(), - avg_cost: t.Number(), - current_price: t.Optional(t.Number()) + ticker: t.String({ minLength: 1, maxLength: 12 }), + shares: t.Number({ minimum: 0.0001 }), + avgCost: t.Number({ minimum: 0.0001 }), + currentPrice: t.Optional(t.Number({ minimum: 0 })) }) }) + .patch('/holdings/:id', async ({ request, set, params, body }) => { + try { + const user = await requireSessionUser(request); + const rows = await db` + UPDATE holdings + SET + shares = COALESCE(${body.shares ?? null}, shares), + avg_cost = COALESCE(${body.avgCost ?? null}, avg_cost), + current_price = COALESCE(${body.currentPrice ?? 
null}, current_price), + updated_at = NOW() + WHERE id = ${params.id} + AND user_id = ${user.id} + RETURNING * + `; - .put('/:id', async ({ params, body }) => { - const result = await db` - UPDATE portfolio - SET ${db(body)} - WHERE id = ${params.id} - RETURNING * - `; + if (!rows[0]) { + set.status = 404; + return { error: 'Holding not found' }; + } - return result[0] || null; + return { holding: rows[0] }; + } catch (error) { + return toHttpError(set, error); + } + }, { + params: t.Object({ + id: t.Numeric() + }), + body: t.Object({ + shares: t.Optional(t.Number({ minimum: 0.0001 })), + avgCost: t.Optional(t.Number({ minimum: 0.0001 })), + currentPrice: t.Optional(t.Number({ minimum: 0 })) + }) }) + .delete('/holdings/:id', async ({ request, set, params }) => { + try { + const user = await requireSessionUser(request); + const rows = await db` + DELETE FROM holdings + WHERE id = ${params.id} + AND user_id = ${user.id} + RETURNING id + `; - .delete('/:id', async ({ params }) => { - await db`DELETE FROM portfolio WHERE id = ${params.id}`; - return { success: true }; + if (!rows[0]) { + set.status = 404; + return { error: 'Holding not found' }; + } + + return { success: true }; + } catch (error) { + return toHttpError(set, error); + } + }, { + params: t.Object({ + id: t.Numeric() + }) }) + .get('/summary', async ({ request, set }) => { + try { + const user = await requireSessionUser(request); + const rows = await db` + SELECT + COUNT(*)::int AS positions, + COALESCE(SUM(market_value), 0)::numeric AS total_value, + COALESCE(SUM(gain_loss), 0)::numeric AS total_gain_loss, + COALESCE(SUM(shares * avg_cost), 0)::numeric AS total_cost_basis, + COALESCE(AVG(gain_loss_pct), 0)::numeric AS avg_return_pct + FROM holdings + WHERE user_id = ${user.id} + `; - .get('/:userId/summary', async ({ params }) => { - const summary = await db` - SELECT - COUNT(*) as total_positions, - COALESCE(SUM(current_value), 0) as total_value, - COALESCE(SUM(gain_loss), 0) as total_gain_loss, - 
COALESCE(SUM(current_value) - SUM(shares * avg_cost), 0) as cost_basis - FROM portfolio - WHERE user_id = ${params.userId} - `; + return { summary: rows[0] }; + } catch (error) { + return toHttpError(set, error); + } + }) + .post('/refresh-prices', async ({ request, set }) => { + try { + const user = await requireSessionUser(request); + const task = await enqueueTask({ + taskType: 'refresh_prices', + payload: { userId: user.id }, + createdBy: user.id, + priority: 80 + }); - return summary[0]; + return { task }; + } catch (error) { + return toHttpError(set, error); + } + }) + .post('/insights/generate', async ({ request, set }) => { + try { + const user = await requireSessionUser(request); + const task = await enqueueTask({ + taskType: 'portfolio_insights', + payload: { userId: user.id }, + createdBy: user.id, + priority: 70 + }); + + return { task }; + } catch (error) { + return toHttpError(set, error); + } + }) + .get('/insights/latest', async ({ request, set }) => { + try { + const user = await requireSessionUser(request); + const rows = await db` + SELECT id, user_id, provider, model, content, created_at + FROM portfolio_insights + WHERE user_id = ${user.id} + ORDER BY created_at DESC + LIMIT 1 + `; + + return { insight: rows[0] ?? null }; + } catch (error) { + return toHttpError(set, error); + } }); diff --git a/backend/src/routes/tasks.ts b/backend/src/routes/tasks.ts new file mode 100644 index 0000000..816ce44 --- /dev/null +++ b/backend/src/routes/tasks.ts @@ -0,0 +1,40 @@ +import { Elysia, t } from 'elysia'; +import { requireSessionUser } from '../session'; +import { getTaskById, listRecentTasks } from '../tasks/repository'; +import { toHttpError } from './error'; + +export const taskRoutes = new Elysia({ prefix: '/tasks' }) + .get('/', async ({ request, set, query }) => { + try { + const user = await requireSessionUser(request); + const limit = Number(query.limit ?? 20); + const safeLimit = Number.isFinite(limit) ? 
Math.min(Math.max(limit, 1), 50) : 20; + const tasks = await listRecentTasks(user.id, safeLimit); + return { tasks }; + } catch (error) { + return toHttpError(set, error); + } + }, { + query: t.Object({ + limit: t.Optional(t.Numeric()) + }) + }) + .get('/:taskId', async ({ request, set, params }) => { + try { + const user = await requireSessionUser(request); + const task = await getTaskById(params.taskId, user.id); + + if (!task) { + set.status = 404; + return { error: 'Task not found' }; + } + + return { task }; + } catch (error) { + return toHttpError(set, error); + } + }, { + params: t.Object({ + taskId: t.String() + }) + }); diff --git a/backend/src/routes/watchlist.ts b/backend/src/routes/watchlist.ts index e97595b..2d85dea 100644 --- a/backend/src/routes/watchlist.ts +++ b/backend/src/routes/watchlist.ts @@ -1,35 +1,80 @@ import { Elysia, t } from 'elysia'; import { db } from '../db'; +import { requireSessionUser } from '../session'; +import { toHttpError } from './error'; export const watchlistRoutes = new Elysia({ prefix: '/watchlist' }) - .get('/:userId', async ({ params }) => { - const watchlist = await db` - SELECT * FROM watchlist - WHERE user_id = ${params.userId} - ORDER BY created_at DESC - `; + .get('/', async ({ request, set }) => { + try { + const user = await requireSessionUser(request); + const watchlist = await db` + SELECT id, user_id, ticker, company_name, sector, created_at + FROM watchlist + WHERE user_id = ${user.id} + ORDER BY created_at DESC + `; - return watchlist; + return { items: watchlist }; + } catch (error) { + return toHttpError(set, error); + } }) + .post('/', async ({ request, set, body }) => { + try { + const user = await requireSessionUser(request); + const ticker = body.ticker.trim().toUpperCase(); - .post('/', async ({ body }) => { - const result = await db` - INSERT INTO watchlist ${db(body)} - ON CONFLICT (user_id, ticker) DO NOTHING - RETURNING * - `; + const rows = await db` + INSERT INTO watchlist ( + user_id, + 
ticker, + company_name, + sector + ) VALUES ( + ${user.id}, + ${ticker}, + ${body.companyName.trim()}, + ${body.sector?.trim() || null} + ) + ON CONFLICT (user_id, ticker) + DO UPDATE SET + company_name = EXCLUDED.company_name, + sector = EXCLUDED.sector + RETURNING * + `; - return result[0]; + return { item: rows[0] }; + } catch (error) { + return toHttpError(set, error); + } }, { body: t.Object({ - user_id: t.String(), - ticker: t.String(), - company_name: t.String(), - sector: t.Optional(t.String()) + ticker: t.String({ minLength: 1, maxLength: 12 }), + companyName: t.String({ minLength: 1, maxLength: 200 }), + sector: t.Optional(t.String({ maxLength: 120 })) }) }) + .delete('/:id', async ({ request, set, params }) => { + try { + const user = await requireSessionUser(request); + const rows = await db` + DELETE FROM watchlist + WHERE id = ${params.id} + AND user_id = ${user.id} + RETURNING id + `; - .delete('/:id', async ({ params }) => { - await db`DELETE FROM watchlist WHERE id = ${params.id}`; - return { success: true }; + if (!rows[0]) { + set.status = 404; + return { error: 'Watchlist item not found' }; + } + + return { success: true }; + } catch (error) { + return toHttpError(set, error); + } + }, { + params: t.Object({ + id: t.Numeric() + }) }); diff --git a/backend/src/services/openclaw.ts b/backend/src/services/openclaw.ts new file mode 100644 index 0000000..58b9b81 --- /dev/null +++ b/backend/src/services/openclaw.ts @@ -0,0 +1,61 @@ +import { env } from '../config'; + +type ChatCompletionResponse = { + choices?: Array<{ + message?: { + content?: string; + }; + }>; +}; + +export class OpenClawService { + isConfigured() { + return Boolean(env.OPENCLAW_BASE_URL && env.OPENCLAW_API_KEY); + } + + async runAnalysis(prompt: string, systemPrompt?: string) { + if (!this.isConfigured()) { + return { + provider: 'local-fallback', + model: env.OPENCLAW_MODEL, + text: 'OpenClaw/ZeroClaw is not configured. 
Set OPENCLAW_BASE_URL and OPENCLAW_API_KEY to enable live AI analysis.' + }; + } + + const response = await fetch(`${env.OPENCLAW_BASE_URL}/v1/chat/completions`, { + method: 'POST', + headers: { + 'Content-Type': 'application/json', + Authorization: `Bearer ${env.OPENCLAW_API_KEY}` + }, + body: JSON.stringify({ + model: env.OPENCLAW_MODEL, + temperature: 0.2, + messages: [ + systemPrompt + ? { role: 'system', content: systemPrompt } + : null, + { role: 'user', content: prompt } + ].filter(Boolean) + }) + }); + + if (!response.ok) { + const body = await response.text(); + throw new Error(`OpenClaw request failed (${response.status}): ${body.slice(0, 200)}`); + } + + const payload = await response.json() as ChatCompletionResponse; + const text = payload.choices?.[0]?.message?.content?.trim(); + + if (!text) { + throw new Error('OpenClaw returned an empty response'); + } + + return { + provider: 'openclaw', + model: env.OPENCLAW_MODEL, + text + }; + } +} diff --git a/backend/src/services/prices.ts b/backend/src/services/prices.ts index 9169e5f..0a209f4 100644 --- a/backend/src/services/prices.ts +++ b/backend/src/services/prices.ts @@ -1,116 +1,72 @@ +import { db } from '../db'; + +const YAHOO_BASE = 'https://query1.finance.yahoo.com/v8/finance/chart'; + export class PriceService { - private baseUrl = 'https://query1.finance.yahoo.com/v8/finance/chart'; + async getQuote(ticker: string): Promise { + const normalizedTicker = ticker.trim().toUpperCase(); - /** - * Get current price for a ticker - */ - async getPrice(ticker: string): Promise { try { - const response = await fetch( - `${this.baseUrl}/${ticker}?interval=1d&range=1d`, - { - headers: { - 'User-Agent': 'Mozilla/5.0 (compatible; FiscalClone/1.0)' - } + const response = await fetch(`${YAHOO_BASE}/${normalizedTicker}?interval=1d&range=1d`, { + headers: { + 'User-Agent': 'Mozilla/5.0 (compatible; FiscalClone/2.0)' } - ); + }); - if (!response.ok) return null; - - const data = await response.json(); - const result 
= data.chart?.result?.[0]; - - if (!result?.meta?.regularMarketPrice) { + if (!response.ok) { return null; } - return result.meta.regularMarketPrice; - } catch (error) { - console.error(`Error fetching price for ${ticker}:`, error); + const payload = await response.json() as { + chart?: { + result?: Array<{ meta?: { regularMarketPrice?: number } }>; + }; + }; + + const price = payload.chart?.result?.[0]?.meta?.regularMarketPrice; + + return typeof price === 'number' ? price : null; + } catch { return null; } } - /** - * Get historical prices - */ - async getHistoricalPrices(ticker: string, period: string = '1y'): Promise> { - try { - const response = await fetch( - `${this.baseUrl}/${ticker}?interval=1d&range=${period}`, - { - headers: { - 'User-Agent': 'Mozilla/5.0 (compatible; FiscalClone/1.0)' - } - } - ); + async refreshHoldingsPrices(userId?: number) { + const holdings = userId + ? await db`SELECT DISTINCT ticker FROM holdings WHERE user_id = ${userId}` + : await db`SELECT DISTINCT ticker FROM holdings`; - if (!response.ok) return []; + let updatedCount = 0; - const data = await response.json(); - const result = data.chart?.result?.[0]; + for (const holding of holdings) { + const price = await this.getQuote(holding.ticker); - if (!result?.timestamp || !result?.indicators?.quote?.[0]?.close) { - return []; + if (price === null) { + continue; } - const timestamps = result.timestamp; - const closes = result.indicators.quote[0].close; - - return timestamps.map((ts: number, i: number) => ({ - date: new Date(ts * 1000).toISOString(), - price: closes[i] - })).filter((p: any) => p.price !== null); - } catch (error) { - console.error(`Error fetching historical prices for ${ticker}:`, error); - return []; - } - } - - /** - * Update all portfolio prices - */ - async updateAllPrices(db: any) { - const holdings = await db` - SELECT DISTINCT ticker FROM portfolio - `; - - let updated = 0; - - for (const { ticker } of holdings) { - const price = await this.getPrice(ticker); 
- - if (price) { + if (userId) { await db` - UPDATE portfolio - SET current_price = ${price} - WHERE ticker = ${ticker} + UPDATE holdings + SET current_price = ${price}, last_price_at = NOW(), updated_at = NOW() + WHERE user_id = ${userId} AND ticker = ${holding.ticker} + `; + } else { + await db` + UPDATE holdings + SET current_price = ${price}, last_price_at = NOW(), updated_at = NOW() + WHERE ticker = ${holding.ticker} `; - updated++; } - // Rate limiting - await new Promise(resolve => setTimeout(resolve, 100)); + updatedCount += 1; + + await Bun.sleep(120); } - console.log(`Updated ${updated} stock prices`); - } - - /** - * Get quote for multiple tickers - */ - async getQuotes(tickers: string[]): Promise> { - const quotes: Record = {}; - - await Promise.all( - tickers.map(async ticker => { - const price = await this.getPrice(ticker); - if (price) { - quotes[ticker] = price; - } - }) - ); - - return quotes; + return { + updatedCount, + totalTickers: holdings.length + }; } } diff --git a/backend/src/services/sec.ts b/backend/src/services/sec.ts index 05f395a..118db06 100644 --- a/backend/src/services/sec.ts +++ b/backend/src/services/sec.ts @@ -1,162 +1,208 @@ -import { type Filings } from '../db'; +import { env } from '../config'; +import type { FilingMetrics, FilingType } from '../types'; -export class SECScraper { - private baseUrl = 'https://www.sec.gov'; - private userAgent = 'Fiscal Clone (contact@example.com)'; +type TickerDirectoryRecord = { + cik_str: number; + ticker: string; + title: string; +}; - /** - * Search SEC filings by ticker - */ - async searchFilings(ticker: string, count = 20): Promise { - const cik = await this.getCIK(ticker); +type RecentFilingsPayload = { + filings?: { + recent?: { + accessionNumber?: string[]; + filingDate?: string[]; + form?: string[]; + primaryDocument?: string[]; + }; + }; + cik?: string; + name?: string; +}; - const response = await fetch( - `https://data.sec.gov/submissions/CIK${cik.padStart(10, '0')}.json`, - { - 
headers: { - 'User-Agent': this.userAgent - } +type CompanyFactsPayload = { + facts?: { + 'us-gaap'?: Record> }>; + }; +}; + +export type SecFiling = { + ticker: string; + cik: string; + companyName: string; + filingType: FilingType; + filingDate: string; + accessionNumber: string; + filingUrl: string | null; +}; + +const SUPPORTED_FORMS: FilingType[] = ['10-K', '10-Q', '8-K']; +const TICKER_CACHE_TTL_MS = 1000 * 60 * 60 * 24; +const FACTS_CACHE_TTL_MS = 1000 * 60 * 10; + +export class SecService { + private tickerCache: Map = new Map(); + private tickerCacheLoadedAt = 0; + private factsCache: Map = new Map(); + + private async fetchJson(url: string): Promise { + const response = await fetch(url, { + headers: { + 'User-Agent': env.SEC_USER_AGENT, + Accept: 'application/json' } - ); + }); if (!response.ok) { - throw new Error(`SEC API error: ${response.status}`); + throw new Error(`SEC request failed (${response.status}) for ${url}`); } - const data = await response.json(); - const filings = data.filings?.recent || []; - - const filteredFilings = filings - .filter((f: any) => - ['10-K', '10-Q', '8-K'].includes(f.form) - ) - .slice(0, count) - .map((f: any) => ({ - ticker, - filing_type: f.form, - filing_date: new Date(f.filingDate), - accession_number: f.accessionNumber, - cik: data.cik, - company_name: data.name || ticker, - })); - - return filteredFilings; + return await response.json() as T; } - /** - * Check for new filings and save to database - */ - async checkNewFilings(db: any) { - const tickers = await db` - SELECT DISTINCT ticker FROM watchlist - `; + private async ensureTickerCache() { + const isFresh = Date.now() - this.tickerCacheLoadedAt < TICKER_CACHE_TTL_MS; - console.log(`Checking filings for ${tickers.length} tickers...`); - - for (const { ticker } of tickers) { - try { - const latest = await db` - SELECT accession_number FROM filings - WHERE ticker = ${ticker} - ORDER BY filing_date DESC - LIMIT 1 - `; - - const filings = await 
this.searchFilings(ticker, 10); - const newFilings = filings.filter( - f => !latest.some((l: any) => l.accession_number === f.accession_number) - ); - - if (newFilings.length > 0) { - console.log(`Found ${newFilings.length} new filings for ${ticker}`); - - for (const filing of newFilings) { - const metrics = await this.extractKeyMetrics(filing); - - await db` - INSERT INTO filings ${db(filing, metrics)} - ON CONFLICT (accession_number) DO NOTHING - `; - } - } - } catch (error) { - console.error(`Error checking filings for ${ticker}:`, error); - } - } - } - - /** - * Get CIK for a ticker - */ - private async getCIK(ticker: string): Promise { - const response = await fetch( - `https://www.sec.gov/files/company_tickers.json` - ); - - if (!response.ok) { - throw new Error('Failed to get company tickers'); + if (isFresh && this.tickerCache.size > 0) { + return; } - const data = await response.json(); - const companies = data.data; + const payload = await this.fetchJson>('https://www.sec.gov/files/company_tickers.json'); + const nextCache = new Map(); - for (const [cik, company] of Object.entries(companies)) { - if (company.ticker === ticker.toUpperCase()) { - return cik; - } + for (const record of Object.values(payload)) { + nextCache.set(record.ticker.toUpperCase(), record); } - throw new Error(`Ticker ${ticker} not found`); + this.tickerCache = nextCache; + this.tickerCacheLoadedAt = Date.now(); } - /** - * Extract key metrics from filing - */ - async extractKeyMetrics(filing: any): Promise { - try { - const filingUrl = `${this.baseUrl}/Archives/${filing.accession_number.replace(/-/g, '')}/${filing.accession_number}-index.htm`; + async resolveTicker(ticker: string) { + await this.ensureTickerCache(); - const response = await fetch(filingUrl, { - headers: { 'User-Agent': this.userAgent } - }); + const normalizedTicker = ticker.trim().toUpperCase(); + const record = this.tickerCache.get(normalizedTicker); - if (!response.ok) return null; - - const html = await 
response.text(); - - // Extract key financial metrics from XBRL - const metrics = { - revenue: this.extractMetric(html, 'Revenues'), - netIncome: this.extractMetric(html, 'NetIncomeLoss'), - totalAssets: this.extractMetric(html, 'Assets'), - cash: this.extractMetric(html, 'CashAndCashEquivalentsAtCarryingValue'), - debt: this.extractMetric(html, 'LongTermDebt') - }; - - return metrics; - } catch (error) { - console.error('Error extracting metrics:', error); - return null; + if (!record) { + throw new Error(`Ticker ${normalizedTicker} was not found in SEC directory`); } - } - - /** - * Extract a specific metric from XBRL data - */ - private extractMetric(html: string, metricName: string): number | null { - const regex = new RegExp(`]*name="[^"]*${metricName}[^"]*"[^>]*>([^<]+)<`, 'i'); - const match = html.match(regex); - return match ? parseFloat(match[1].replace(/,/g, '')) : null; - } - - /** - * Get filing details by accession number - */ - async getFilingDetails(accessionNumber: string) { - const filingUrl = `${this.baseUrl}/Archives/${accessionNumber.replace(/-/g, '')}/${accessionNumber}-index.htm`; return { - filing_url: filingUrl + ticker: normalizedTicker, + cik: String(record.cik_str), + companyName: record.title }; } + + async fetchRecentFilings(ticker: string, limit = 20): Promise { + const company = await this.resolveTicker(ticker); + const cikPadded = company.cik.padStart(10, '0'); + + const payload = await this.fetchJson(`https://data.sec.gov/submissions/CIK${cikPadded}.json`); + const recent = payload.filings?.recent; + + if (!recent) { + return []; + } + + const forms = recent.form ?? []; + const accessionNumbers = recent.accessionNumber ?? []; + const filingDates = recent.filingDate ?? []; + const primaryDocuments = recent.primaryDocument ?? 
[]; + const filings: SecFiling[] = []; + + for (let i = 0; i < forms.length; i += 1) { + const filingType = forms[i] as FilingType; + + if (!SUPPORTED_FORMS.includes(filingType)) { + continue; + } + + const accessionNumber = accessionNumbers[i]; + + if (!accessionNumber) { + continue; + } + + const compactAccession = accessionNumber.replace(/-/g, ''); + const documentName = primaryDocuments[i]; + const filingUrl = documentName + ? `https://www.sec.gov/Archives/edgar/data/${Number(company.cik)}/${compactAccession}/${documentName}` + : null; + + filings.push({ + ticker: company.ticker, + cik: company.cik, + companyName: payload.name ?? company.companyName, + filingType, + filingDate: filingDates[i] ?? new Date().toISOString().slice(0, 10), + accessionNumber, + filingUrl + }); + + if (filings.length >= limit) { + break; + } + } + + return filings; + } + + private pickLatestFact(payload: CompanyFactsPayload, tag: string): number | null { + const unitCollections = payload.facts?.['us-gaap']?.[tag]?.units; + + if (!unitCollections) { + return null; + } + + const preferredUnits = ['USD', 'USD/shares']; + + for (const unit of preferredUnits) { + const series = unitCollections[unit]; + if (!series?.length) { + continue; + } + + const best = [...series] + .filter((item) => typeof item.val === 'number') + .sort((a, b) => { + const aDate = Date.parse(a.filed ?? a.end ?? '1970-01-01'); + const bDate = Date.parse(b.filed ?? b.end ?? 
'1970-01-01'); + return bDate - aDate; + })[0]; + + if (best?.val !== undefined) { + return best.val; + } + } + + return null; + } + + async fetchMetrics(cik: string): Promise { + const normalized = cik.padStart(10, '0'); + const cached = this.factsCache.get(normalized); + + if (cached && Date.now() - cached.loadedAt < FACTS_CACHE_TTL_MS) { + return cached.metrics; + } + + const payload = await this.fetchJson(`https://data.sec.gov/api/xbrl/companyfacts/CIK${normalized}.json`); + + const metrics: FilingMetrics = { + revenue: this.pickLatestFact(payload, 'Revenues'), + netIncome: this.pickLatestFact(payload, 'NetIncomeLoss'), + totalAssets: this.pickLatestFact(payload, 'Assets'), + cash: this.pickLatestFact(payload, 'CashAndCashEquivalentsAtCarryingValue'), + debt: this.pickLatestFact(payload, 'LongTermDebt') + }; + + this.factsCache.set(normalized, { + loadedAt: Date.now(), + metrics + }); + + return metrics; + } } diff --git a/backend/src/session.ts b/backend/src/session.ts new file mode 100644 index 0000000..fae4389 --- /dev/null +++ b/backend/src/session.ts @@ -0,0 +1,30 @@ +import { auth } from './auth'; +import type { SessionUser } from './types'; + +export class UnauthorizedError extends Error { + constructor(message = 'Authentication required') { + super(message); + this.name = 'UnauthorizedError'; + } +} + +export async function requireSessionUser(request: Request): Promise { + const session = await auth.api.getSession({ headers: request.headers }); + + if (!session?.user?.id) { + throw new UnauthorizedError(); + } + + const userId = Number(session.user.id); + + if (!Number.isFinite(userId)) { + throw new UnauthorizedError('Invalid session user id'); + } + + return { + id: userId, + email: session.user.email, + name: session.user.name ?? null, + image: session.user.image ?? 
null + }; +} diff --git a/backend/src/tasks/processors.ts b/backend/src/tasks/processors.ts new file mode 100644 index 0000000..5bd3954 --- /dev/null +++ b/backend/src/tasks/processors.ts @@ -0,0 +1,201 @@ +import { z } from 'zod'; +import { db } from '../db'; +import { OpenClawService } from '../services/openclaw'; +import { PriceService } from '../services/prices'; +import { SecService } from '../services/sec'; +import type { LongTaskRecord, TaskType } from '../types'; + +const secService = new SecService(); +const priceService = new PriceService(); +const openClawService = new OpenClawService(); + +const syncFilingsPayload = z.object({ + ticker: z.string().min(1), + limit: z.number().int().positive().max(50).default(20) +}); + +const refreshPricesPayload = z.object({ + userId: z.number().int().positive().optional() +}); + +const analyzeFilingPayload = z.object({ + accessionNumber: z.string().min(8) +}); + +const portfolioInsightsPayload = z.object({ + userId: z.number().int().positive() +}); + +async function processSyncFilings(task: LongTaskRecord) { + const { ticker, limit } = syncFilingsPayload.parse(task.payload); + const filings = await secService.fetchRecentFilings(ticker, limit); + const metrics = filings.length > 0 + ? 
await secService.fetchMetrics(filings[0].cik) + : null; + + let touched = 0; + + for (const filing of filings) { + await db` + INSERT INTO filings ( + ticker, + filing_type, + filing_date, + accession_number, + cik, + company_name, + filing_url, + metrics, + updated_at + ) VALUES ( + ${filing.ticker}, + ${filing.filingType}, + ${filing.filingDate}, + ${filing.accessionNumber}, + ${filing.cik}, + ${filing.companyName}, + ${filing.filingUrl}, + ${metrics}, + NOW() + ) + ON CONFLICT (accession_number) + DO UPDATE SET + filing_type = EXCLUDED.filing_type, + filing_date = EXCLUDED.filing_date, + filing_url = EXCLUDED.filing_url, + metrics = COALESCE(EXCLUDED.metrics, filings.metrics), + updated_at = NOW() + `; + + touched += 1; + } + + return { + ticker: ticker.toUpperCase(), + filingsFetched: filings.length, + recordsUpserted: touched, + metrics + }; +} + +async function processRefreshPrices(task: LongTaskRecord) { + const { userId } = refreshPricesPayload.parse(task.payload); + const result = await priceService.refreshHoldingsPrices(userId); + + return { + scope: userId ? `user:${userId}` : 'global', + ...result + }; +} + +async function processAnalyzeFiling(task: LongTaskRecord) { + const { accessionNumber } = analyzeFilingPayload.parse(task.payload); + + const rows = await db` + SELECT * + FROM filings + WHERE accession_number = ${accessionNumber} + LIMIT 1 + `; + + const filing = rows[0]; + + if (!filing) { + throw new Error(`Filing ${accessionNumber} was not found`); + } + + const prompt = [ + 'You are a fiscal research assistant focused on regulatory signals.', + `Analyze this SEC filing from ${filing.company_name} (${filing.ticker}).`, + `Form: ${filing.filing_type}`, + `Filed: ${filing.filing_date}`, + `Metrics JSON: ${JSON.stringify(filing.metrics ?? {})}`, + 'Return concise sections: Thesis, Red Flags, Follow-up Questions, Portfolio Impact.' 
+ ].join('\n'); + + const analysis = await openClawService.runAnalysis(prompt, 'Use concise institutional analyst language.'); + + await db` + UPDATE filings + SET analysis = ${analysis}, + updated_at = NOW() + WHERE accession_number = ${accessionNumber} + `; + + return { + accessionNumber, + analysis + }; +} + +async function processPortfolioInsights(task: LongTaskRecord) { + const { userId } = portfolioInsightsPayload.parse(task.payload); + + const holdings = await db` + SELECT + ticker, + shares, + avg_cost, + current_price, + market_value, + gain_loss, + gain_loss_pct + FROM holdings + WHERE user_id = ${userId} + ORDER BY market_value DESC + `; + + const summaryRows = await db` + SELECT + COUNT(*)::int AS positions, + COALESCE(SUM(market_value), 0)::numeric AS total_value, + COALESCE(SUM(gain_loss), 0)::numeric AS total_gain_loss, + COALESCE(AVG(gain_loss_pct), 0)::numeric AS avg_return_pct + FROM holdings + WHERE user_id = ${userId} + `; + + const summary = summaryRows[0] ?? { + positions: 0, + total_value: 0, + total_gain_loss: 0, + avg_return_pct: 0 + }; + + const prompt = [ + 'Generate portfolio intelligence with actionable recommendations.', + `Portfolio summary: ${JSON.stringify(summary)}`, + `Holdings: ${JSON.stringify(holdings)}`, + 'Respond with: 1) Portfolio health score (0-100), 2) top 3 risks, 3) top 3 opportunities, 4) next actions in 7 days.' 
+ ].join('\n'); + + const insight = await openClawService.runAnalysis(prompt, 'Act as a risk-aware buy-side analyst.'); + + await db` + INSERT INTO portfolio_insights (user_id, model, provider, content) + VALUES (${userId}, ${insight.model}, ${insight.provider}, ${insight.text}) + `; + + return { + userId, + summary, + insight + }; +} + +const processors: Record<TaskType, (task: LongTaskRecord) => Promise<Record<string, unknown>>> = { + sync_filings: processSyncFilings, + refresh_prices: processRefreshPrices, + analyze_filing: processAnalyzeFiling, + portfolio_insights: processPortfolioInsights +}; + +export async function processTask(task: LongTaskRecord) { + const processor = processors[task.task_type]; + + if (!processor) { + throw new Error(`No processor registered for task ${task.task_type}`); + } + + return await processor(task); +} diff --git a/backend/src/tasks/repository.ts b/backend/src/tasks/repository.ts new file mode 100644 index 0000000..866508c --- /dev/null +++ b/backend/src/tasks/repository.ts @@ -0,0 +1,168 @@ +import { db } from '../db'; +import { env } from '../config'; +import type { LongTaskRecord, TaskType } from '../types'; + +type EnqueueTaskInput = { + taskType: TaskType; + payload: Record<string, unknown>; + createdBy?: number; + priority?: number; + scheduledAt?: Date; + maxAttempts?: number; +}; + +export async function enqueueTask(input: EnqueueTaskInput) { + const task = await db` + INSERT INTO long_tasks ( + task_type, + status, + priority, + payload, + max_attempts, + scheduled_at, + created_by + ) VALUES ( + ${input.taskType}, + 'queued', + ${input.priority ?? 50}, + ${input.payload}, + ${input.maxAttempts ?? env.TASK_MAX_ATTEMPTS}, + ${input.scheduledAt ?? new Date()}, + ${input.createdBy ?? null} + ) + RETURNING * + `; + + return task[0]; +} + +export async function getTaskById(taskId: string, userId?: number) { + const rows = userId + ?
await db` + SELECT * + FROM long_tasks + WHERE id = ${taskId} + AND (created_by IS NULL OR created_by = ${userId}) + LIMIT 1 + ` + : await db` + SELECT * + FROM long_tasks + WHERE id = ${taskId} + LIMIT 1 + `; + + return rows[0] ?? null; +} + +export async function listRecentTasks(userId: number, limit = 20) { + return await db` + SELECT * + FROM long_tasks + WHERE created_by = ${userId} + ORDER BY created_at DESC + LIMIT ${limit} + `; +} + +export async function claimNextTask() { + const staleSeconds = env.TASK_STALE_SECONDS; + + return await db.begin(async (tx) => { + await tx` + UPDATE long_tasks + SET status = 'queued', + heartbeat_at = NULL, + started_at = NULL, + updated_at = NOW(), + error = COALESCE(error, 'Task lease expired and was re-queued') + WHERE status = 'running' + AND heartbeat_at IS NOT NULL + AND heartbeat_at < NOW() - (${staleSeconds}::text || ' seconds')::interval + AND attempts < max_attempts + `; + + await tx` + UPDATE long_tasks + SET status = 'failed', + finished_at = NOW(), + updated_at = NOW(), + error = COALESCE(error, 'Task lease expired and max attempts reached') + WHERE status = 'running' + AND heartbeat_at IS NOT NULL + AND heartbeat_at < NOW() - (${staleSeconds}::text || ' seconds')::interval + AND attempts >= max_attempts + `; + + const rows = await tx` + WITH candidate AS ( + SELECT id + FROM long_tasks + WHERE status = 'queued' + AND scheduled_at <= NOW() + ORDER BY priority DESC, created_at ASC + FOR UPDATE SKIP LOCKED + LIMIT 1 + ) + UPDATE long_tasks t + SET status = 'running', + started_at = COALESCE(t.started_at, NOW()), + heartbeat_at = NOW(), + attempts = t.attempts + 1, + updated_at = NOW() + FROM candidate + WHERE t.id = candidate.id + RETURNING t.* + `; + + return rows[0] ?? 
null; + }); +} + +export async function heartbeatTask(taskId: string) { + await db` + UPDATE long_tasks + SET heartbeat_at = NOW(), + updated_at = NOW() + WHERE id = ${taskId} + AND status = 'running' + `; +} + +export async function completeTask(taskId: string, result: Record<string, unknown>) { + await db` + UPDATE long_tasks + SET status = 'completed', + result = ${result}, + error = NULL, + finished_at = NOW(), + heartbeat_at = NOW(), + updated_at = NOW() + WHERE id = ${taskId} + `; +} + +export async function failTask(task: LongTaskRecord, reason: string, retryDelaySeconds = 20) { + const canRetry = task.attempts < task.max_attempts; + + if (canRetry) { + await db` + UPDATE long_tasks + SET status = 'queued', + error = ${reason}, + scheduled_at = NOW() + (${retryDelaySeconds}::text || ' seconds')::interval, + updated_at = NOW() + WHERE id = ${task.id} + `; + return; + } + + await db` + UPDATE long_tasks + SET status = 'failed', + error = ${reason}, + finished_at = NOW(), + updated_at = NOW() + WHERE id = ${task.id} + `; +} diff --git a/backend/src/tasks/worker-loop.ts b/backend/src/tasks/worker-loop.ts new file mode 100644 index 0000000..7ae5fff --- /dev/null +++ b/backend/src/tasks/worker-loop.ts @@ -0,0 +1,52 @@ +import { env } from '../config'; +import { claimNextTask, completeTask, failTask, heartbeatTask } from './repository'; +import { processTask } from './processors'; + +let keepRunning = true; + +export function stopWorkerLoop() { + keepRunning = false; +} + +function normalizeError(error: unknown) { + if (error instanceof Error) { + return `${error.name}: ${error.message}`; + } + + return String(error); +} + +export async function runWorkerLoop() { + console.log('[worker] started'); + + while (keepRunning) { + const task = await claimNextTask(); + + if (!task) { + await Bun.sleep(700); + continue; + } + + console.log(`[worker] claimed task ${task.id} (${task.task_type})`); + + const heartbeatTimer = setInterval(() => { + void heartbeatTask(task.id).catch((error) => {
+ console.error(`[worker] heartbeat failed for ${task.id}`, error); + }); + }, env.TASK_HEARTBEAT_SECONDS * 1000); + + try { + const result = await processTask(task); + await completeTask(task.id, result); + console.log(`[worker] completed task ${task.id}`); + } catch (error) { + const normalized = normalizeError(error); + console.error(`[worker] failed task ${task.id}`, normalized); + await failTask(task, normalized); + } finally { + clearInterval(heartbeatTimer); + } + } + + console.log('[worker] stopping'); +} diff --git a/backend/src/types.ts b/backend/src/types.ts new file mode 100644 index 0000000..b3d3a10 --- /dev/null +++ b/backend/src/types.ts @@ -0,0 +1,78 @@ +export type FilingType = '10-K' | '10-Q' | '8-K'; + +export type FilingMetrics = { + revenue: number | null; + netIncome: number | null; + totalAssets: number | null; + cash: number | null; + debt: number | null; +}; + +export type FilingRecord = { + id: number; + ticker: string; + filing_type: FilingType; + filing_date: string; + accession_number: string; + cik: string; + company_name: string; + filing_url: string | null; + metrics: FilingMetrics | null; + analysis: Record<string, unknown> | null; + created_at: string; + updated_at: string; +}; + +export type HoldingRecord = { + id: number; + user_id: number; + ticker: string; + shares: string; + avg_cost: string; + current_price: string | null; + market_value: string; + gain_loss: string; + gain_loss_pct: string; + last_price_at: string | null; + created_at: string; + updated_at: string; +}; + +export type WatchlistRecord = { + id: number; + user_id: number; + ticker: string; + company_name: string; + sector: string | null; + created_at: string; +}; + +export type TaskType = 'sync_filings' | 'refresh_prices' | 'analyze_filing' | 'portfolio_insights'; + +export type TaskStatus = 'queued' | 'running' | 'completed' | 'failed'; + +export type LongTaskRecord = { + id: string; + task_type: TaskType; + status: TaskStatus; + priority: number; + payload: Record<string, unknown>; + result:
Record<string, unknown> | null; + error: string | null; + attempts: number; + max_attempts: number; + scheduled_at: string; + started_at: string | null; + heartbeat_at: string | null; + finished_at: string | null; + created_by: number | null; + created_at: string; + updated_at: string; +}; + +export type SessionUser = { + id: number; + email: string; + name: string | null; + image: string | null; +}; diff --git a/backend/src/worker.ts b/backend/src/worker.ts new file mode 100644 index 0000000..daefb43 --- /dev/null +++ b/backend/src/worker.ts @@ -0,0 +1,19 @@ +import { runWorkerLoop, stopWorkerLoop } from './tasks/worker-loop'; +import { closeDb } from './db'; + +const shutdown = async (signal: string) => { + console.log(`[worker] received ${signal}`); + stopWorkerLoop(); + await Bun.sleep(250); + await closeDb(); + process.exit(0); +}; + +process.on('SIGINT', () => void shutdown('SIGINT')); +process.on('SIGTERM', () => void shutdown('SIGTERM')); + +runWorkerLoop().catch(async (error) => { + console.error('[worker] fatal error', error); + await closeDb(); + process.exit(1); +}); diff --git a/docker-compose.yml b/docker-compose.yml index cc96064..f64758d 100644 --- a/docker-compose.yml +++ b/docker-compose.yml @@ -9,9 +9,9 @@ services: volumes: - postgres_data:/var/lib/postgresql/data expose: - - "5432" + - '5432' healthcheck: - test: ["CMD-SHELL", "pg_isready -U ${POSTGRES_USER:-postgres} -d ${POSTGRES_DB:-fiscal}"] + test: ['CMD-SHELL', 'pg_isready -U ${POSTGRES_USER:-postgres} -d ${POSTGRES_DB:-fiscal}'] interval: 5s timeout: 5s retries: 10 @@ -21,6 +21,7 @@ services: context: ./backend dockerfile: Dockerfile restart: unless-stopped + command: ['sh', '-c', 'bun run src/db/migrate.ts && bun run src/index.ts'] env_file: - path: ./.env required: false @@ -30,33 +31,69 @@ services: DATABASE_URL: ${DATABASE_URL:-postgres://postgres:postgres@postgres:5432/fiscal} PORT: ${PORT:-3001} POSTGRES_HOST: postgres + FRONTEND_URL: ${FRONTEND_URL:-http://localhost:3000} BETTER_AUTH_SECRET:
${BETTER_AUTH_SECRET:-local-dev-better-auth-secret-change-me} BETTER_AUTH_BASE_URL: ${BETTER_AUTH_BASE_URL:-http://localhost:3001} - FRONTEND_URL: ${FRONTEND_URL:-http://localhost:3000} + SEC_USER_AGENT: ${SEC_USER_AGENT:-Fiscal Clone } + OPENCLAW_BASE_URL: ${OPENCLAW_BASE_URL:-} + OPENCLAW_API_KEY: ${OPENCLAW_API_KEY:-} + OPENCLAW_MODEL: ${OPENCLAW_MODEL:-zeroclaw} + TASK_HEARTBEAT_SECONDS: ${TASK_HEARTBEAT_SECONDS:-15} + TASK_STALE_SECONDS: ${TASK_STALE_SECONDS:-120} + TASK_MAX_ATTEMPTS: ${TASK_MAX_ATTEMPTS:-3} expose: - - "3001" + - '3001' depends_on: postgres: condition: service_healthy healthcheck: - test: ["CMD-SHELL", "wget -q --spider http://localhost:3001/api/health || exit 1"] + test: ['CMD-SHELL', 'wget -q --spider http://localhost:3001/api/health || exit 1'] interval: 30s timeout: 10s retries: 3 + worker: + build: + context: ./backend + dockerfile: Dockerfile + restart: unless-stopped + command: ['sh', '-c', 'bun run src/db/migrate.ts && bun run src/worker.ts'] + env_file: + - path: ./.env + required: false + - path: ../.env + required: false + environment: + DATABASE_URL: ${DATABASE_URL:-postgres://postgres:postgres@postgres:5432/fiscal} + PORT: ${PORT:-3001} + POSTGRES_HOST: postgres + FRONTEND_URL: ${FRONTEND_URL:-http://localhost:3000} + BETTER_AUTH_SECRET: ${BETTER_AUTH_SECRET:-local-dev-better-auth-secret-change-me} + BETTER_AUTH_BASE_URL: ${BETTER_AUTH_BASE_URL:-http://localhost:3001} + SEC_USER_AGENT: ${SEC_USER_AGENT:-Fiscal Clone } + OPENCLAW_BASE_URL: ${OPENCLAW_BASE_URL:-} + OPENCLAW_API_KEY: ${OPENCLAW_API_KEY:-} + OPENCLAW_MODEL: ${OPENCLAW_MODEL:-zeroclaw} + TASK_HEARTBEAT_SECONDS: ${TASK_HEARTBEAT_SECONDS:-15} + TASK_STALE_SECONDS: ${TASK_STALE_SECONDS:-120} + TASK_MAX_ATTEMPTS: ${TASK_MAX_ATTEMPTS:-3} + depends_on: + postgres: + condition: service_healthy + frontend: build: context: ./frontend dockerfile: Dockerfile args: - NEXT_PUBLIC_API_URL: ${NEXT_PUBLIC_API_URL:-http://backend:3001} + NEXT_PUBLIC_API_URL: 
${NEXT_PUBLIC_API_URL:-http://localhost:3001} restart: unless-stopped environment: PORT: 3000 HOSTNAME: 0.0.0.0 - NEXT_PUBLIC_API_URL: ${NEXT_PUBLIC_API_URL:-http://backend:3001} + NEXT_PUBLIC_API_URL: ${NEXT_PUBLIC_API_URL:-http://localhost:3001} expose: - - "3000" + - '3000' depends_on: - backend diff --git a/docs/REBUILD_DECISIONS.md b/docs/REBUILD_DECISIONS.md new file mode 100644 index 0000000..9dad1bc --- /dev/null +++ b/docs/REBUILD_DECISIONS.md @@ -0,0 +1,78 @@ +# Fiscal Clone Rebuild Decisions + +This document records the ground-up design choices for the 2026 rebuild so every major decision is explicit and reviewable. + +## 1) Architecture: split frontend and API +- Decision: keep `Next.js` in `frontend/` and a dedicated high-throughput API in `backend/`. +- Why: clean separation for scaling and deployment; web rendering and data ingestion do not contend for resources. +- Tradeoff: more services to run locally. + +## 2) Runtime choice: Bun + Elysia for API +- Decision: use Bun runtime with Elysia for low overhead and fast cold/warm request handling. +- Why: strong performance profile for IO-heavy workloads (quotes, SEC fetch, queue polling). +- Tradeoff: narrower ecosystem compatibility than plain Node in some libraries. + +## 3) Auth standard: Better Auth only +- Decision: use Better Auth end-to-end and remove legacy JWT/NextAuth patterns. +- Why: single auth surface across API and Next.js clients, DB-backed sessions, less custom auth code. +- Tradeoff: schema must align closely with Better Auth expectations. + +## 4) Persistence: PostgreSQL as source of truth +- Decision: keep Postgres for all domain entities and task durability. +- Why: transactional consistency, mature operational tooling, simple backup/restore. +- Tradeoff: queue throughput is lower than specialized brokers at massive scale. + +## 5) Long-running jobs: durable DB queue +- Decision: implement a durable `long_tasks` queue table plus dedicated worker process. 
+- Why: supports multi-minute jobs, retries, result persistence, and survives API restarts. +- Tradeoff: custom queue logic is more code than dropping in a broker library. + +## 6) Async-first API for heavy workflows +- Decision: filing sync, filing analysis, and portfolio insights are queued and polled via `/api/tasks/:id`. +- Why: avoids request timeouts and keeps the UX responsive. +- Tradeoff: frontend must handle job lifecycle states. + +## 7) AI integration contract for OpenClaw/ZeroClaw +- Decision: use an adapter that targets an OpenAI-compatible chat endpoint (`OPENCLAW_BASE_URL`) with model override (`OPENCLAW_MODEL`). +- Why: works with OpenClaw/ZeroClaw deployments while keeping provider lock-in low. +- Tradeoff: advanced provider-specific features are not exposed in v1. + +## 8) SEC ingestion strategy +- Decision: fetch filings from SEC submissions API and enrich with company facts metrics. +- Why: stable machine-readable endpoints with less brittle parsing than HTML scraping. +- Tradeoff: facts can lag specific filing publication timing. + +## 9) Market pricing strategy +- Decision: use Yahoo Finance chart endpoint for quote snapshots and periodic refresh. +- Why: good coverage and straightforward integration for portfolio mark-to-market. +- Tradeoff: endpoint reliability/quotas can vary; provider abstraction retained for future switch. + +## 10) API shape: domain modules + strict schemas +- Decision: organize routes by domain (`portfolio`, `watchlist`, `filings`, `ai`, `tasks`) with Zod-style schema validation via Elysia types. +- Why: predictable contract boundaries and safer payload handling. +- Tradeoff: slight boilerplate cost. + +## 11) Security posture +- Decision: all business endpoints require authenticated session resolution through Better Auth session API. +- Why: prevents cross-user data access and removes implicit trust in client-supplied user IDs. +- Tradeoff: each protected route performs auth/session checks. 
+ +## 12) Frontend rendering model +- Decision: use Next.js App Router with client-heavy dashboards where live polling is required. +- Why: server rendering for shell + interactive client zones for real-time task/market updates. +- Tradeoff: more client-side state management in dashboard screens. + +## 13) Design language: terminal-futurist UI system +- Decision: build a clear terminal-inspired design with grid scanlines, mono + geometric type pairing, and neon cyan/green accent palette. +- Why: matches requested futuristic terminal aesthetic while remaining readable. +- Tradeoff: highly stylized branding may not fit conservative enterprise environments. + +## 14) Performance defaults +- Decision: optimize for fewer round trips (batched fetches), async processing, indexed SQL, and paginated list endpoints. +- Why: improves p95 latency under concurrent load. +- Tradeoff: slightly more complex query/service code. + +## 15) Operations model +- Decision: run three processes in production: frontend, backend API, backend worker. +- Why: isolates web traffic from heavy background processing and enables independent scaling. +- Tradeoff: additional deployment/health-check wiring. 
diff --git a/frontend/app/auth/signin/page.tsx b/frontend/app/auth/signin/page.tsx index 1c660a9..25239d0 100644 --- a/frontend/app/auth/signin/page.tsx +++ b/frontend/app/auth/signin/page.tsx @@ -1,17 +1,24 @@ 'use client'; -import { signIn, useSession } from '@/lib/better-auth'; -import { useRouter } from 'next/navigation'; -import { useEffect, useState } from 'react'; +export const dynamic = 'force-dynamic'; +export const revalidate = 0; -export default function SignIn() { - const { data: session, isPending: sessionPending } = useSession(); +import Link from 'next/link'; +import { useEffect, useState } from 'react'; +import { useRouter } from 'next/navigation'; +import { signIn, useSession } from '@/lib/better-auth'; +import { AuthShell } from '@/components/auth/auth-shell'; +import { Input } from '@/components/ui/input'; +import { Button } from '@/components/ui/button'; + +export default function SignInPage() { const router = useRouter(); + const { data: session, isPending: sessionPending } = useSession(); const [email, setEmail] = useState(''); const [password, setPassword] = useState(''); const [loading, setLoading] = useState(false); - const [error, setError] = useState(''); + const [error, setError] = useState(null); useEffect(() => { if (!sessionPending && session?.user) { @@ -19,91 +26,64 @@ export default function SignIn() { } }, [sessionPending, session, router]); - const handleCredentialsLogin = async (e: React.FormEvent) => { - e.preventDefault(); + const handleSubmit = async (event: React.FormEvent) => { + event.preventDefault(); setLoading(true); - setError(''); + setError(null); try { - const result = await signIn.email({ - email, - password, - }); + const result = await signIn.email({ email, password }); if (result.error) { setError(result.error.message || 'Invalid credentials'); - return; + } else { + router.replace('/'); + router.refresh(); } - - router.replace('/'); - router.refresh(); - } catch (err) { - setError('Login failed'); + } catch { + 
setError('Sign in failed'); } finally { setLoading(false); } }; return ( -
-
-

- Fiscal Clone -

-

Sign in to your account

+ + No account yet?{' '} + + Create one + + + )} + > +
+
+ + setEmail(event.target.value)} placeholder="you@company.com" /> +
- {error && ( -
- {error} -
- )} +
+ + setPassword(event.target.value)} + placeholder="********" + /> +
- -
- - setEmail(e.target.value)} - className="w-full bg-slate-700/50 border border-slate-600 rounded-lg px-4 py-3 text-white placeholder-slate-400 focus:outline-none focus:ring-2 focus:ring-blue-500" - placeholder="you@example.com" - required - /> -
+ {error ?

{error}

: null} -
- - setPassword(e.target.value)} - className="w-full bg-slate-700/50 border border-slate-600 rounded-lg px-4 py-3 text-white placeholder-slate-400 focus:outline-none focus:ring-2 focus:ring-blue-500" - placeholder="•••••••••" - required - minLength={8} - /> -
- - -
- -

- Don't have an account?{' '} - - Sign up - -

-
-
+ + + ); } diff --git a/frontend/app/auth/signup/page.tsx b/frontend/app/auth/signup/page.tsx index 282f617..09afddf 100644 --- a/frontend/app/auth/signup/page.tsx +++ b/frontend/app/auth/signup/page.tsx @@ -1,18 +1,25 @@ 'use client'; -import { signUp, useSession } from '@/lib/better-auth'; -import { useRouter } from 'next/navigation'; -import { useEffect, useState } from 'react'; +export const dynamic = 'force-dynamic'; +export const revalidate = 0; -export default function SignUp() { - const { data: session, isPending: sessionPending } = useSession(); +import Link from 'next/link'; +import { useEffect, useState } from 'react'; +import { useRouter } from 'next/navigation'; +import { signUp, useSession } from '@/lib/better-auth'; +import { AuthShell } from '@/components/auth/auth-shell'; +import { Input } from '@/components/ui/input'; +import { Button } from '@/components/ui/button'; + +export default function SignUpPage() { const router = useRouter(); + const { data: session, isPending: sessionPending } = useSession(); const [name, setName] = useState(''); const [email, setEmail] = useState(''); const [password, setPassword] = useState(''); const [loading, setLoading] = useState(false); - const [error, setError] = useState(''); + const [error, setError] = useState(null); useEffect(() => { if (!sessionPending && session?.user) { @@ -20,25 +27,25 @@ export default function SignUp() { } }, [sessionPending, session, router]); - const handleSignUp = async (e: React.FormEvent) => { - e.preventDefault(); + const handleSubmit = async (event: React.FormEvent) => { + event.preventDefault(); setLoading(true); - setError(''); + setError(null); try { const result = await signUp.email({ - email, - password, name, + email, + password }); if (result.error) { - setError(result.error.message || 'Sign up failed'); + setError(result.error.message || 'Unable to create account'); } else { router.replace('/'); router.refresh(); } - } catch (err) { + } catch { setError('Sign up 
failed'); } finally { setLoading(false); @@ -46,79 +53,47 @@ export default function SignUp() { }; return ( -
-
-

- Fiscal Clone -

-

Create your account

- - {error && ( -
- {error} -
- )} - -
-
- - setName(e.target.value)} - className="w-full bg-slate-700/50 border border-slate-600 rounded-lg px-4 py-3 text-white placeholder-slate-400 focus:outline-none focus:ring-2 focus:ring-blue-500" - placeholder="Your name" - required - /> -
- -
- - setEmail(e.target.value)} - className="w-full bg-slate-700/50 border border-slate-600 rounded-lg px-4 py-3 text-white placeholder-slate-400 focus:outline-none focus:ring-2 focus:ring-blue-500" - placeholder="you@example.com" - required - /> -
- -
- - setPassword(e.target.value)} - className="w-full bg-slate-700/50 border border-slate-600 rounded-lg px-4 py-3 text-white placeholder-slate-400 focus:outline-none focus:ring-2 focus:ring-blue-500" - placeholder="•••••••••" - required - minLength={8} - /> -
- - -
- -

- Already have an account?{' '} - + + Already registered?{' '} + Sign in - -

-
-
+ + + )} + > +
+
+ + setName(event.target.value)} placeholder="Operator name" /> +
+ +
+ + setEmail(event.target.value)} placeholder="you@company.com" /> +
+ +
+ + setPassword(event.target.value)} + placeholder="Minimum 8 characters" + /> +
+ + {error ?

{error}

: null} + + +
+ ); } diff --git a/frontend/app/filings/page.tsx b/frontend/app/filings/page.tsx index d21f57b..b86ba50 100644 --- a/frontend/app/filings/page.tsx +++ b/frontend/app/filings/page.tsx @@ -1,185 +1,251 @@ 'use client'; -import { useSession } from '@/lib/better-auth'; -import { useRouter } from 'next/navigation'; -import { useEffect, useState } from 'react'; -import Link from 'next/link'; +import { useCallback, useEffect, useMemo, useState } from 'react'; import { format } from 'date-fns'; +import { Bot, Download, Search, TimerReset } from 'lucide-react'; +import { useSearchParams } from 'next/navigation'; +import { AppShell } from '@/components/shell/app-shell'; +import { Panel } from '@/components/ui/panel'; +import { Button } from '@/components/ui/button'; +import { Input } from '@/components/ui/input'; +import { StatusPill } from '@/components/ui/status-pill'; +import { useAuthGuard } from '@/hooks/use-auth-guard'; +import { useTaskPoller } from '@/hooks/use-task-poller'; +import { getTask, listFilings, queueFilingAnalysis, queueFilingSync } from '@/lib/api'; +import type { Filing, Task } from '@/lib/types'; +import { formatCompactCurrency } from '@/lib/format'; export default function FilingsPage() { - const { data: session, isPending } = useSession(); - const router = useRouter(); - const [filings, setFilings] = useState([]); + const { isPending, isAuthenticated } = useAuthGuard(); + const searchParams = useSearchParams(); + + const [filings, setFilings] = useState([]); const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + const [syncTickerInput, setSyncTickerInput] = useState(''); + const [filterTickerInput, setFilterTickerInput] = useState(''); const [searchTicker, setSearchTicker] = useState(''); + const [activeTask, setActiveTask] = useState(null); useEffect(() => { - if (!isPending && !session) { - router.push('/auth/signin'); - return; + const ticker = searchParams.get('ticker'); + if (ticker) { + const normalized = 
ticker.toUpperCase(); + setSyncTickerInput(normalized); + setFilterTickerInput(normalized); + setSearchTicker(normalized); } + }, [searchParams]); - if (session?.user) { - fetchFilings(); - } - }, [session, isPending, router]); - - const fetchFilings = async (ticker?: string) => { + const loadFilings = useCallback(async (ticker?: string) => { setLoading(true); - try { - const url = ticker - ? `${process.env.NEXT_PUBLIC_API_URL}/api/filings/${ticker}` - : `${process.env.NEXT_PUBLIC_API_URL}/api/filings`; + setError(null); - const response = await fetch(url); - const data = await response.json(); - setFilings(data); - } catch (error) { - console.error('Error fetching filings:', error); + try { + const response = await listFilings({ ticker, limit: 120 }); + setFilings(response.filings); + } catch (err) { + setError(err instanceof Error ? err.message : 'Unable to fetch filings'); } finally { setLoading(false); } - }; + }, []); - const handleSearch = (e: React.FormEvent) => { - e.preventDefault(); - fetchFilings(searchTicker || undefined); - }; + useEffect(() => { + if (!isPending && isAuthenticated) { + void loadFilings(searchTicker || undefined); + } + }, [isPending, isAuthenticated, searchTicker, loadFilings]); + + const polledTask = useTaskPoller({ + taskId: activeTask?.id ?? null, + onTerminalState: async () => { + setActiveTask(null); + await loadFilings(searchTicker || undefined); + } + }); + + const liveTask = polledTask ?? 
activeTask; + + const triggerSync = async () => { + if (!syncTickerInput.trim()) { + return; + } - const handleRefresh = async (ticker: string) => { try { - await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/filings/refresh/${ticker}`, { - method: 'POST' - }); - fetchFilings(ticker); - } catch (error) { - console.error('Error refreshing filings:', error); + const { task } = await queueFilingSync({ ticker: syncTickerInput.trim().toUpperCase(), limit: 20 }); + const latest = await getTask(task.id); + setActiveTask(latest.task); + } catch (err) { + setError(err instanceof Error ? err.message : 'Failed to queue filing sync'); } }; - const getFilingTypeColor = (type: string) => { - switch (type) { - case '10-K': return 'bg-blue-500/20 text-blue-400 border-blue-500/30'; - case '10-Q': return 'bg-green-500/20 text-green-400 border-green-500/30'; - case '8-K': return 'bg-purple-500/20 text-purple-400 border-purple-500/30'; - default: return 'bg-slate-500/20 text-slate-400 border-slate-500/30'; + const triggerAnalysis = async (accessionNumber: string) => { + try { + const { task } = await queueFilingAnalysis(accessionNumber); + const latest = await getTask(task.id); + setActiveTask(latest.task); + } catch (err) { + setError(err instanceof Error ? err.message : 'Failed to queue filing analysis'); } }; - if (loading) { - return
Loading...
; + const groupedByTicker = useMemo(() => { + const counts = new Map(); + + for (const filing of filings) { + counts.set(filing.ticker, (counts.get(filing.ticker) ?? 0) + 1); + } + + return counts; + }, [filings]); + + if (isPending || !isAuthenticated) { + return
Opening filings stream...
; } return ( -
- + {liveTask.error ?

{liveTask.error}

: null} + + ) : null} -
-
-

SEC Filings

- + +
{ + event.preventDefault(); + void triggerSync(); + }} > - + Add to Watchlist - -
- -
- - setSearchTicker(e.target.value)} - className="flex-1 bg-slate-700/50 border border-slate-600 rounded-lg px-4 py-3 text-white placeholder-slate-400 focus:outline-none focus:ring-2 focus:ring-blue-500" - placeholder="Search by ticker (e.g., AAPL)" + setSyncTickerInput(event.target.value.toUpperCase())} + placeholder="Ticker (AAPL)" + className="max-w-xs" /> - - + + + + +
{ + event.preventDefault(); + setSearchTicker(filterTickerInput.trim().toUpperCase()); + }} + > + setFilterTickerInput(event.target.value.toUpperCase())} + placeholder="Ticker filter" + className="max-w-xs" + /> + + +
-
+ +
-
- {filings.length > 0 ? ( - - + + {error ?

{error}

: null} + {loading ? ( +

Fetching filings...

+ ) : filings.length === 0 ? ( +

No filings available. Queue a sync job to ingest fresh SEC data.

+ ) : ( +
+
+ - - - - - + + + + + + + - {filings.map((filing: any) => ( - - - - - - - - ))} + {filings.map((filing) => { + const revenue = filing.metrics?.revenue; + const hasAnalysis = Boolean(filing.analysis?.text || filing.analysis?.legacyInsights); + + return ( + + + + + + + + + + ); + })}
TickerCompanyTypeFiling DateActionsTickerTypeFiledRevenue SnapshotCompanyAIAction
{filing.ticker}{filing.company_name} - - {filing.filing_type} - - - {format(new Date(filing.filing_date), 'MMM dd, yyyy')} - - - -
+
{filing.ticker}
+
{groupedByTicker.get(filing.ticker)} filings
+
{filing.filing_type}{format(new Date(filing.filing_date), 'MMM dd, yyyy')}{revenue ? formatCompactCurrency(revenue) : 'n/a'}{filing.company_name}{hasAnalysis ? 'Ready' : 'Not generated'} +
+ {filing.filing_url ? ( + + SEC + + ) : null} + +
+
- ) : ( -
-

No filings found

-

- Add stocks to your watchlist to track their SEC filings -

-
- )} -
- - + + )} + + ); } diff --git a/frontend/app/globals.css b/frontend/app/globals.css index e66d5a5..f057b89 100644 --- a/frontend/app/globals.css +++ b/frontend/app/globals.css @@ -2,20 +2,124 @@ @tailwind components; @tailwind utilities; -@layer base { - :root { - --background: 222.2 84% 4.9%; - --foreground: 210 40% 98%; - --border: 217.2 32.6% 17.5%; +:root { + --bg-0: #05080d; + --bg-1: #08121a; + --bg-2: #0b1f28; + --panel: rgba(6, 17, 24, 0.8); + --panel-soft: rgba(7, 22, 31, 0.62); + --panel-bright: rgba(10, 33, 45, 0.9); + --line-weak: rgba(126, 217, 255, 0.22); + --line-strong: rgba(123, 255, 217, 0.75); + --accent: #68ffd5; + --accent-strong: #8cffeb; + --danger: #ff7070; + --danger-soft: rgba(122, 33, 33, 0.44); + --terminal-bright: #e8fff8; + --terminal-muted: #94b9c5; +} + +* { + box-sizing: border-box; +} + +html, +body { + margin: 0; + padding: 0; +} + +body { + min-height: 100vh; + font-family: var(--font-display), sans-serif; + color: var(--terminal-bright); + background: + radial-gradient(circle at 18% -10%, rgba(126, 217, 255, 0.25), transparent 35%), + radial-gradient(circle at 84% 0%, rgba(104, 255, 213, 0.2), transparent 30%), + linear-gradient(140deg, var(--bg-0), var(--bg-1) 50%, var(--bg-2)); +} + +.app-surface, +.auth-page { + position: relative; + min-height: 100vh; + overflow: hidden; +} + +.ambient-grid { + position: absolute; + inset: 0; + background-image: + linear-gradient(rgba(126, 217, 255, 0.08) 1px, transparent 1px), + linear-gradient(90deg, rgba(126, 217, 255, 0.07) 1px, transparent 1px); + background-size: 34px 34px; + mask-image: radial-gradient(ellipse at center, black 20%, transparent 75%); + pointer-events: none; +} + +.noise-layer { + position: absolute; + inset: 0; + pointer-events: none; + opacity: 0.3; + background-image: radial-gradient(rgba(160, 255, 227, 0.15) 0.7px, transparent 0.7px); + background-size: 4px 4px; +} + +.terminal-caption { + font-family: var(--font-mono), monospace; +} + +.panel-heading { + 
font-family: var(--font-mono), monospace; + letter-spacing: 0.08em; +} + +.data-table { + width: 100%; + border-collapse: collapse; +} + +.data-table th, +.data-table td { + border-bottom: 1px solid var(--line-weak); + padding: 0.75rem 0.65rem; + text-align: left; + font-size: 0.875rem; +} + +.data-table th { + font-family: var(--font-mono), monospace; + font-size: 0.75rem; + letter-spacing: 0.08em; + text-transform: uppercase; + color: var(--terminal-muted); +} + +.data-table tbody tr:hover { + background-color: rgba(17, 47, 61, 0.45); +} + +@media (prefers-reduced-motion: no-preference) { + .ambient-grid { + animation: subtle-grid-shift 18s linear infinite; + } + + @keyframes subtle-grid-shift { + 0% { + transform: translateY(0px); + } + 50% { + transform: translateY(-8px); + } + 100% { + transform: translateY(0px); + } } } -@layer base { - * { - border-color: hsl(var(--border)); - } - body { - background-color: hsl(var(--background)); - color: hsl(var(--foreground)); +@media (max-width: 1024px) { + .ambient-grid { + background-size: 26px 26px; } } diff --git a/frontend/app/layout.tsx b/frontend/app/layout.tsx index 661a3b9..b0e4b85 100644 --- a/frontend/app/layout.tsx +++ b/frontend/app/layout.tsx @@ -1,15 +1,26 @@ import './globals.css'; +import type { Metadata } from 'next'; +import { JetBrains_Mono, Space_Grotesk } from 'next/font/google'; -export default function RootLayout({ - children, -}: { - children: React.ReactNode -}) { +const display = Space_Grotesk({ + subsets: ['latin'], + variable: '--font-display' +}); + +const mono = JetBrains_Mono({ + subsets: ['latin'], + variable: '--font-mono' +}); + +export const metadata: Metadata = { + title: 'Fiscal Clone', + description: 'Futuristic fiscal intelligence terminal powered by Better Auth and durable AI tasks.' 
+}; + +export default function RootLayout({ children }: { children: React.ReactNode }) { return ( - - - {children} - + + {children} - ) + ); } diff --git a/frontend/app/page.tsx b/frontend/app/page.tsx index 1ddbe63..7778541 100644 --- a/frontend/app/page.tsx +++ b/frontend/app/page.tsx @@ -1,110 +1,229 @@ 'use client'; -import { useSession } from '@/lib/better-auth'; -import { useRouter } from 'next/navigation'; -import { useEffect, useState } from 'react'; import Link from 'next/link'; +import { useCallback, useEffect, useMemo, useState } from 'react'; +import { Activity, Bot, RefreshCw, Sparkles } from 'lucide-react'; +import { AppShell } from '@/components/shell/app-shell'; +import { Panel } from '@/components/ui/panel'; +import { Button } from '@/components/ui/button'; +import { MetricCard } from '@/components/dashboard/metric-card'; +import { TaskFeed } from '@/components/dashboard/task-feed'; +import { StatusPill } from '@/components/ui/status-pill'; +import { useAuthGuard } from '@/hooks/use-auth-guard'; +import { useTaskPoller } from '@/hooks/use-task-poller'; +import { + getLatestPortfolioInsight, + getPortfolioSummary, + getTask, + listFilings, + listRecentTasks, + listWatchlist, + queuePortfolioInsights, + queuePriceRefresh +} from '@/lib/api'; +import type { PortfolioInsight, PortfolioSummary, Task } from '@/lib/types'; +import { formatCompactCurrency, formatCurrency, formatPercent } from '@/lib/format'; -export default function Home() { - const { data: session, isPending } = useSession(); - const router = useRouter(); - const [stats, setStats] = useState({ filings: 0, portfolioValue: 0, watchlist: 0 }); +type DashboardState = { + summary: PortfolioSummary; + filingsCount: number; + watchlistCount: number; + tasks: Task[]; + latestInsight: PortfolioInsight | null; +}; - useEffect(() => { - if (!isPending && !session) { - router.push('/auth/signin'); - return; - } +const EMPTY_STATE: DashboardState = { + summary: { + positions: 0, + total_value: '0', + 
total_gain_loss: '0', + total_cost_basis: '0', + avg_return_pct: '0' + }, + filingsCount: 0, + watchlistCount: 0, + tasks: [], + latestInsight: null +}; - if (session?.user) { - fetchStats(session.user.id); - } - }, [session, isPending, router]); +export default function CommandCenterPage() { + const { isPending, isAuthenticated, session } = useAuthGuard(); + const [state, setState] = useState(EMPTY_STATE); + const [loading, setLoading] = useState(true); + const [error, setError] = useState(null); + const [activeTaskId, setActiveTaskId] = useState(null); + + const loadData = useCallback(async () => { + setLoading(true); + setError(null); - const fetchStats = async (userId: string) => { try { - const [portfolioRes, watchlistRes, filingsRes] = await Promise.all([ - fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/portfolio/${userId}/summary`), - fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/watchlist/${userId}`), - fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/filings`) + const [summaryRes, filingsRes, watchlistRes, tasksRes, insightRes] = await Promise.all([ + getPortfolioSummary(), + listFilings({ limit: 200 }), + listWatchlist(), + listRecentTasks(20), + getLatestPortfolioInsight() ]); - const portfolioData = await portfolioRes.json(); - const watchlistData = await watchlistRes.json(); - const filingsData = await filingsRes.json(); - - setStats({ - filings: filingsData.length || 0, - portfolioValue: portfolioData.total_value || 0, - watchlist: watchlistData.length || 0 + setState({ + summary: summaryRes.summary, + filingsCount: filingsRes.filings.length, + watchlistCount: watchlistRes.items.length, + tasks: tasksRes.tasks, + latestInsight: insightRes.insight }); - } catch (error) { - console.error('Error fetching stats:', error); + } catch (err) { + setError(err instanceof Error ? 
err.message : 'Failed to load dashboard'); + } finally { + setLoading(false); } - }; + }, []); + + useEffect(() => { + if (!isPending && isAuthenticated) { + void loadData(); + } + }, [isPending, isAuthenticated, loadData]); + + const trackedTask = useTaskPoller({ + taskId: activeTaskId, + onTerminalState: () => { + setActiveTaskId(null); + void loadData(); + } + }); + + const headerActions = ( + <> + + + + ); + + const signedGain = useMemo(() => { + const gain = Number(state.summary.total_gain_loss ?? 0); + return gain >= 0 ? `+${formatCurrency(gain)}` : formatCurrency(gain); + }, [state.summary.total_gain_loss]); + + if (isPending || !isAuthenticated) { + return
Booting secure terminal...
; + } return ( -
- + -
-
-

Dashboard

-

Welcome back, {session?.user?.name}

+ +
+ + Runtime state: {loading ? 'syncing' : 'stable'}
- -
-
-

Total Filings

-

{stats.filings}

-
-
-

Portfolio Value

-

- ${stats.portfolioValue.toLocaleString('en-US', { minimumFractionDigits: 2, maximumFractionDigits: 2 })} -

-
-
-

Watchlist

-

{stats.watchlist}

-
-
- -
-

Quick Actions

-
- - Add to Watchlist - - - Add to Portfolio - - - Search SEC Filings - - - View Portfolio - -
-
-
-
+ + ); } diff --git a/frontend/app/portfolio/page.tsx b/frontend/app/portfolio/page.tsx index 22663c3..e676326 100644 --- a/frontend/app/portfolio/page.tsx +++ b/frontend/app/portfolio/page.tsx @@ -1,301 +1,332 @@ 'use client'; -import { useSession } from '@/lib/better-auth'; -import { useRouter } from 'next/navigation'; -import { useEffect, useState } from 'react'; -import { LineChart, Line, XAxis, YAxis, CartesianGrid, Tooltip, ResponsiveContainer, PieChart, Pie, Cell, Legend } from 'recharts'; -import { format } from 'date-fns'; -import Link from 'next/link'; +import { useCallback, useEffect, useMemo, useState } from 'react'; +import { PieChart, Pie, Cell, Tooltip, ResponsiveContainer, BarChart, CartesianGrid, XAxis, YAxis, Bar } from 'recharts'; +import { BrainCircuit, Plus, RefreshCcw, Trash2 } from 'lucide-react'; +import { AppShell } from '@/components/shell/app-shell'; +import { Panel } from '@/components/ui/panel'; +import { Button } from '@/components/ui/button'; +import { Input } from '@/components/ui/input'; +import { StatusPill } from '@/components/ui/status-pill'; +import { useAuthGuard } from '@/hooks/use-auth-guard'; +import { useTaskPoller } from '@/hooks/use-task-poller'; +import { + deleteHolding, + getLatestPortfolioInsight, + getTask, + getPortfolioSummary, + listHoldings, + queuePortfolioInsights, + queuePriceRefresh, + upsertHolding +} from '@/lib/api'; +import type { Holding, PortfolioInsight, PortfolioSummary, Task } from '@/lib/types'; +import { asNumber, formatCurrency, formatPercent } from '@/lib/format'; + +type FormState = { + ticker: string; + shares: string; + avgCost: string; + currentPrice: string; +}; + +const CHART_COLORS = ['#6effd8', '#5fd3ff', '#66ffa1', '#8dbbff', '#f4f88f', '#ff9c9c']; + +const EMPTY_SUMMARY: PortfolioSummary = { + positions: 0, + total_value: '0', + total_gain_loss: '0', + total_cost_basis: '0', + avg_return_pct: '0' +}; export default function PortfolioPage() { - const { data: session, isPending } = 
useSession(); - const router = useRouter(); - const [portfolio, setPortfolio] = useState([]); - const [summary, setSummary] = useState({ total_value: 0, total_gain_loss: 0, cost_basis: 0 }); + const { isPending, isAuthenticated } = useAuthGuard(); + + const [holdings, setHoldings] = useState([]); + const [summary, setSummary] = useState(EMPTY_SUMMARY); + const [latestInsight, setLatestInsight] = useState(null); const [loading, setLoading] = useState(true); - const [showAddModal, setShowAddModal] = useState(false); - const [newHolding, setNewHolding] = useState({ ticker: '', shares: '', avg_cost: '' }); + const [error, setError] = useState(null); + const [activeTask, setActiveTask] = useState(null); + const [form, setForm] = useState({ ticker: '', shares: '', avgCost: '', currentPrice: '' }); - useEffect(() => { - if (!isPending && !session) { - router.push('/auth/signin'); - return; - } + const loadPortfolio = useCallback(async () => { + setLoading(true); + setError(null); - if (session?.user?.id) { - fetchPortfolio(session.user.id); - } - }, [session, isPending, router]); - - const fetchPortfolio = async (userId: string) => { try { - const [portfolioRes, summaryRes] = await Promise.all([ - fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/portfolio/${userId}`), - fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/portfolio/${userId}/summary`) + const [holdingsRes, summaryRes, insightRes] = await Promise.all([ + listHoldings(), + getPortfolioSummary(), + getLatestPortfolioInsight() ]); - const portfolioData = await portfolioRes.json(); - const summaryData = await summaryRes.json(); - - setPortfolio(portfolioData); - setSummary(summaryData); - } catch (error) { - console.error('Error fetching portfolio:', error); + setHoldings(holdingsRes.holdings); + setSummary(summaryRes.summary); + setLatestInsight(insightRes.insight); + } catch (err) { + setError(err instanceof Error ? 
err.message : 'Could not fetch portfolio data'); } finally { setLoading(false); } - }; + }, []); - const handleAddHolding = async (e: React.FormEvent) => { - e.preventDefault(); - const userId = session?.user?.id; - if (!userId) return; + useEffect(() => { + if (!isPending && isAuthenticated) { + void loadPortfolio(); + } + }, [isPending, isAuthenticated, loadPortfolio]); + + const polledTask = useTaskPoller({ + taskId: activeTask?.id ?? null, + onTerminalState: async () => { + setActiveTask(null); + await loadPortfolio(); + } + }); + + const liveTask = polledTask ?? activeTask; + + const allocationData = useMemo( + () => holdings.map((holding) => ({ + name: holding.ticker, + value: asNumber(holding.market_value) + })), + [holdings] + ); + + const performanceData = useMemo( + () => holdings.map((holding) => ({ + name: holding.ticker, + value: asNumber(holding.gain_loss_pct) + })), + [holdings] + ); + + const submitHolding = async (event: React.FormEvent) => { + event.preventDefault(); try { - await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/portfolio`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - user_id: userId, - ticker: newHolding.ticker.toUpperCase(), - shares: parseFloat(newHolding.shares), - avg_cost: parseFloat(newHolding.avg_cost) - }) + await upsertHolding({ + ticker: form.ticker.toUpperCase(), + shares: Number(form.shares), + avgCost: Number(form.avgCost), + currentPrice: form.currentPrice ? Number(form.currentPrice) : undefined }); - setShowAddModal(false); - setNewHolding({ ticker: '', shares: '', avg_cost: '' }); - fetchPortfolio(userId); - } catch (error) { - console.error('Error adding holding:', error); + setForm({ ticker: '', shares: '', avgCost: '', currentPrice: '' }); + await loadPortfolio(); + } catch (err) { + setError(err instanceof Error ? 
err.message : 'Failed to save holding'); } }; - const handleDeleteHolding = async (id: number) => { - if (!confirm('Are you sure you want to delete this holding?')) return; - const userId = session?.user?.id; - if (!userId) return; - + const queueRefresh = async () => { try { - await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/portfolio/${id}`, { - method: 'DELETE' - }); - - fetchPortfolio(userId); - } catch (error) { - console.error('Error deleting holding:', error); + const { task } = await queuePriceRefresh(); + const latest = await getTask(task.id); + setActiveTask(latest.task); + } catch (err) { + setError(err instanceof Error ? err.message : 'Unable to queue price refresh'); } }; - if (loading) { - return
Loading...
; + const queueInsights = async () => { + try { + const { task } = await queuePortfolioInsights(); + const latest = await getTask(task.id); + setActiveTask(latest.task); + } catch (err) { + setError(err instanceof Error ? err.message : 'Unable to queue portfolio insights'); + } + }; + + if (isPending || !isAuthenticated) { + return
Loading portfolio matrix...
; } - const pieData = portfolio.length > 0 ? portfolio.map((p: any) => ({ - name: p.ticker, - value: p.current_value || (p.shares * p.avg_cost) - })) : []; - - const COLORS = ['#3b82f6', '#8b5cf6', '#10b981', '#f59e0b', '#ef4444', '#ec4899']; - return ( -
- + {liveTask.error ?

{liveTask.error}

: null} + + ) : null} -
-
-

Portfolio

- -
+ {error ? ( + +

{error}

+
+ ) : null} -
-
-

Total Value

-

- ${summary.total_value?.toLocaleString('en-US', { minimumFractionDigits: 2, maximumFractionDigits: 2 }) || '$0.00'} -

-
-
-

Total Gain/Loss

-

= 0 ? 'text-green-400' : 'text-red-400'}`}> - {summary.total_gain_loss >= 0 ? '+' : ''}${summary.total_gain_loss?.toLocaleString('en-US', { minimumFractionDigits: 2, maximumFractionDigits: 2 }) || '$0.00'} -

-
-
-

Positions

-

{portfolio.length}

-
-
+
+ +

{formatCurrency(summary.total_value)}

+

Cost basis {formatCurrency(summary.total_cost_basis)}

+
+ +

= 0 ? 'text-[#96f5bf]' : 'text-[#ff9f9f]'}`}> + {formatCurrency(summary.total_gain_loss)} +

+

Average return {formatPercent(summary.avg_return_pct)}

+
+ +

{summary.positions}

+

Active symbols in portfolio.

+
+
-
-
-

Portfolio Allocation

- {pieData.length > 0 ? ( - +
+ + {loading ? ( +

Loading chart...

+ ) : allocationData.length > 0 ? ( +
+ - `${entry.name} ($${(entry.value / 1000).toFixed(1)}k)`} - > - {pieData.map((entry, index) => ( - + + {allocationData.map((entry, index) => ( + ))} - + formatCurrency(value)} /> - ) : ( -

No holdings yet

- )} -
+
+ ) : ( +

No holdings yet.

+ )} + -
-

Performance

- {portfolio.length > 0 ? ( - - ({ name: p.ticker, value: p.gain_loss_pct || 0 }))}> - - - - - - + + {loading ? ( +

Loading chart...

+ ) : performanceData.length > 0 ? ( +
+ + + + + + `${value.toFixed(2)}%`} /> + + - ) : ( -

No performance data yet

- )} -
-
+
+ ) : ( +

No performance data yet.

+ )} + +
-
- - - - - - - - - - - - - - - {portfolio.map((holding: any) => ( - - - - - - - - - - - ))} - -
TickerSharesAvg CostCurrent PriceValueGain/Loss%Actions
{holding.ticker}{holding.shares.toLocaleString()}${holding.avg_cost.toFixed(2)}${holding.current_price?.toFixed(2) || 'N/A'}${holding.current_value?.toFixed(2) || 'N/A'}= 0 ? 'text-green-400' : 'text-red-400'}`}> - {holding.gain_loss >= 0 ? '+' : ''}${holding.gain_loss?.toFixed(2) || '0.00'} - = 0 ? 'text-green-400' : 'text-red-400'}`}> - {holding.gain_loss_pct >= 0 ? '+' : ''}{holding.gain_loss_pct?.toFixed(2) || '0.00'}% - - -
-
-
+
+ + {loading ? ( +

Loading holdings...

+ ) : holdings.length === 0 ? ( +

No holdings added yet.

+ ) : ( +
+ + + + + + + + + + + + + + {holdings.map((holding) => ( + + + + + + + + + + ))} + +
TickerSharesAvg CostPriceValueGain/LossAction
{holding.ticker}{asNumber(holding.shares).toLocaleString()}{formatCurrency(holding.avg_cost)}{holding.current_price ? formatCurrency(holding.current_price) : 'n/a'}{formatCurrency(holding.market_value)}= 0 ? 'text-[#96f5bf]' : 'text-[#ff9898]'}> + {formatCurrency(holding.gain_loss)} ({formatPercent(holding.gain_loss_pct)}) + + +
+
+ )} +
- {showAddModal && ( -
-
-

Add Holding

-
-
- - setNewHolding({...newHolding, ticker: e.target.value})} - className="w-full bg-slate-700 border border-slate-600 rounded-lg px-4 py-3 text-white" - placeholder="AAPL" - required - /> -
-
- - setNewHolding({...newHolding, shares: e.target.value})} - className="w-full bg-slate-700 border border-slate-600 rounded-lg px-4 py-3 text-white" - placeholder="100" - required - /> -
-
- - setNewHolding({...newHolding, avg_cost: e.target.value})} - className="w-full bg-slate-700 border border-slate-600 rounded-lg px-4 py-3 text-white" - placeholder="150.00" - required - /> -
-
- - -
-
+ +
+
+ + setForm((prev) => ({ ...prev, ticker: event.target.value.toUpperCase() }))} required /> +
+
+ + setForm((prev) => ({ ...prev, shares: event.target.value }))} required /> +
+
+ + setForm((prev) => ({ ...prev, avgCost: event.target.value }))} required /> +
+
+ + setForm((prev) => ({ ...prev, currentPrice: event.target.value }))} /> +
+ +
+ +
+

Latest AI Insight

+

+ {latestInsight?.content ?? 'No insight available yet. Queue an AI brief from the header.'} +

-
- )} -
+ +
+ ); } diff --git a/frontend/app/watchlist/page.tsx b/frontend/app/watchlist/page.tsx index c351d0f..4383d03 100644 --- a/frontend/app/watchlist/page.tsx +++ b/frontend/app/watchlist/page.tsx @@ -1,224 +1,181 @@ 'use client'; -import { useSession } from '@/lib/better-auth'; -import { useRouter } from 'next/navigation'; -import { useEffect, useState } from 'react'; +import { useCallback, useEffect, useState } from 'react'; +import { ArrowRight, Eye, Plus, Trash2 } from 'lucide-react'; import Link from 'next/link'; +import { AppShell } from '@/components/shell/app-shell'; +import { Panel } from '@/components/ui/panel'; +import { Button } from '@/components/ui/button'; +import { Input } from '@/components/ui/input'; +import { StatusPill } from '@/components/ui/status-pill'; +import { useAuthGuard } from '@/hooks/use-auth-guard'; +import { useTaskPoller } from '@/hooks/use-task-poller'; +import { deleteWatchlistItem, getTask, listWatchlist, queueFilingSync, upsertWatchlistItem } from '@/lib/api'; +import type { Task, WatchlistItem } from '@/lib/types'; + +type FormState = { + ticker: string; + companyName: string; + sector: string; +}; export default function WatchlistPage() { - const { data: session, isPending } = useSession(); - const router = useRouter(); - const [watchlist, setWatchlist] = useState([]); + const { isPending, isAuthenticated } = useAuthGuard(); + + const [items, setItems] = useState([]); const [loading, setLoading] = useState(true); - const [showAddModal, setShowAddModal] = useState(false); - const [newStock, setNewStock] = useState({ ticker: '', company_name: '', sector: '' }); + const [error, setError] = useState(null); + const [activeTask, setActiveTask] = useState(null); + const [form, setForm] = useState({ ticker: '', companyName: '', sector: '' }); - useEffect(() => { - if (!isPending && !session) { - router.push('/auth/signin'); - return; - } + const loadWatchlist = useCallback(async () => { + setLoading(true); + setError(null); - if 
(session?.user?.id) { - fetchWatchlist(session.user.id); - } - }, [session, isPending, router]); - - const fetchWatchlist = async (userId: string) => { try { - const response = await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/watchlist/${userId}`); - const data = await response.json(); - setWatchlist(data); - } catch (error) { - console.error('Error fetching watchlist:', error); + const response = await listWatchlist(); + setItems(response.items); + } catch (err) { + setError(err instanceof Error ? err.message : 'Failed to load watchlist'); } finally { setLoading(false); } - }; + }, []); - const handleAddStock = async (e: React.FormEvent) => { - e.preventDefault(); - const userId = session?.user?.id; - if (!userId) return; + useEffect(() => { + if (!isPending && isAuthenticated) { + void loadWatchlist(); + } + }, [isPending, isAuthenticated, loadWatchlist]); + + const polledTask = useTaskPoller({ + taskId: activeTask?.id ?? null, + onTerminalState: () => { + setActiveTask(null); + } + }); + + const liveTask = polledTask ?? activeTask; + + const submit = async (event: React.FormEvent) => { + event.preventDefault(); try { - await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/watchlist`, { - method: 'POST', - headers: { 'Content-Type': 'application/json' }, - body: JSON.stringify({ - user_id: userId, - ticker: newStock.ticker.toUpperCase(), - company_name: newStock.company_name, - sector: newStock.sector - }) + await upsertWatchlistItem({ + ticker: form.ticker.toUpperCase(), + companyName: form.companyName, + sector: form.sector || undefined }); - setShowAddModal(false); - setNewStock({ ticker: '', company_name: '', sector: '' }); - fetchWatchlist(userId); - } catch (error) { - console.error('Error adding stock:', error); + setForm({ ticker: '', companyName: '', sector: '' }); + await loadWatchlist(); + } catch (err) { + setError(err instanceof Error ? 
err.message : 'Failed to save watchlist item'); } }; - const handleDeleteStock = async (id: number) => { - if (!confirm('Are you sure you want to remove this stock from watchlist?')) return; - const userId = session?.user?.id; - if (!userId) return; - + const queueSync = async (ticker: string) => { try { - await fetch(`${process.env.NEXT_PUBLIC_API_URL}/api/watchlist/${id}`, { - method: 'DELETE' - }); - - fetchWatchlist(userId); - } catch (error) { - console.error('Error deleting stock:', error); + const { task } = await queueFilingSync({ ticker, limit: 20 }); + const latest = await getTask(task.id); + setActiveTask(latest.task); + } catch (err) { + setError(err instanceof Error ? err.message : `Failed to queue sync for ${ticker}`); } }; - if (loading) { - return
Loading...
; + if (isPending || !isAuthenticated) { + return
Loading watchlist terminal...
; } return ( -
- + + ) : null} -
-
-

Watchlist

- -
+
+ + {error ?

{error}

: null} + {loading ? ( +

Loading watchlist...

+ ) : items.length === 0 ? ( +

No symbols yet. Add one from the right panel.

+ ) : ( +
+ {items.map((item) => ( +
+
+
+

{item.sector ?? 'Unclassified'}

+

{item.ticker}

+

{item.company_name}

+
+ +
-
- {watchlist.map((stock: any) => ( -
-
-
-

{stock.ticker}

-

{stock.company_name}

-
- -
- {stock.sector && ( -
- {stock.sector} -
- )} -
- - Filings - - - Add to Portfolio - -
-
- ))} - - {watchlist.length === 0 && ( -
-

Your watchlist is empty

-

- Add stocks to track their SEC filings and monitor performance -

+
+ + + Open stream + + + +
+
+ ))}
)} -
-
+ - {showAddModal && ( -
-
-

Add to Watchlist

-
-
- - setNewStock({...newStock, ticker: e.target.value})} - className="w-full bg-slate-700 border border-slate-600 rounded-lg px-4 py-3 text-white" - placeholder="AAPL" - required - /> -
-
- - setNewStock({...newStock, company_name: e.target.value})} - className="w-full bg-slate-700 border border-slate-600 rounded-lg px-4 py-3 text-white" - placeholder="Apple Inc." - required - /> -
-
- - setNewStock({...newStock, sector: e.target.value})} - className="w-full bg-slate-700 border border-slate-600 rounded-lg px-4 py-3 text-white" - placeholder="Technology" - /> -
-
- - -
-
-
-
- )} -
+ +
+
+ + setForm((prev) => ({ ...prev, ticker: event.target.value.toUpperCase() }))} required /> +
+
+ + setForm((prev) => ({ ...prev, companyName: event.target.value }))} required /> +
+
+ + setForm((prev) => ({ ...prev, sector: event.target.value }))} /> +
+ +
+
+
+ ); } diff --git a/frontend/components/auth/auth-shell.tsx b/frontend/components/auth/auth-shell.tsx new file mode 100644 index 0000000..b87c12c --- /dev/null +++ b/frontend/components/auth/auth-shell.tsx @@ -0,0 +1,45 @@ +import Link from 'next/link'; + +type AuthShellProps = { + title: string; + subtitle: string; + children: React.ReactNode; + footer: React.ReactNode; +}; + +export function AuthShell({ title, subtitle, children, footer }: AuthShellProps) { + return ( +
+