Improve Coolify deploy build caching

This commit is contained in:
2026-03-08 10:43:55 -04:00
parent 7a70545f09
commit 2f7933f4a3
6 changed files with 148 additions and 5 deletions

View File

@@ -1,6 +1,8 @@
# Build output and local caches
.next
.cache
.swc
.workflow-data
# Dependencies
node_modules
@@ -18,3 +20,13 @@ data
# Git
.git
.gitignore
.gitea
# Test and local tooling artifacts
.playwright-cli
e2e
output
# Docs and generated metadata
README.md
tsconfig.tsbuildinfo

View File

@@ -29,6 +29,8 @@ WORKFLOW_TARGET_WORLD=local
WORKFLOW_POSTGRES_URL=postgres://workflow:workflow@workflow-postgres:5432/workflow
WORKFLOW_POSTGRES_WORKER_CONCURRENCY=10
WORKFLOW_POSTGRES_JOB_PREFIX=fiscal_
RUN_WORKFLOW_SETUP_ON_START=true
RUN_DB_MIGRATIONS_ON_START=true
# Optional local-world fallback for rollback/testing
WORKFLOW_LOCAL_DATA_DIR=.workflow-data

View File

@@ -1,7 +1,10 @@
# syntax=docker/dockerfile:1.7
FROM oven/bun:1.3.5-alpine AS deps
WORKDIR /app
COPY package.json bun.lock ./
RUN bun install --frozen-lockfile
RUN --mount=type=cache,target=/root/.bun/install/cache \
bun install --frozen-lockfile
FROM deps AS builder
ARG NEXT_PUBLIC_API_URL=
@@ -11,8 +14,11 @@ ENV DATABASE_URL=${DATABASE_URL}
ENV NEXT_TELEMETRY_DISABLED=1
ENV WORKFLOW_TARGET_WORLD=@workflow/world-postgres
ENV WORKFLOW_LOCAL_DATA_DIR=/app/.workflow-data
ENV RUN_WORKFLOW_SETUP_ON_START=true
ENV RUN_DB_MIGRATIONS_ON_START=true
COPY . .
RUN mkdir -p public /app/.workflow-data && bun run build
RUN --mount=type=cache,target=/app/.next/cache \
mkdir -p public /app/.workflow-data && bun run build
FROM oven/bun:1.3.5-alpine AS runner
WORKDIR /app
@@ -23,12 +29,13 @@ ENV NEXT_PUBLIC_API_URL=${NEXT_PUBLIC_API_URL}
ENV NEXT_TELEMETRY_DISABLED=1
ENV WORKFLOW_TARGET_WORLD=@workflow/world-postgres
ENV WORKFLOW_LOCAL_DATA_DIR=/app/.workflow-data
ENV RUN_WORKFLOW_SETUP_ON_START=true
ENV RUN_DB_MIGRATIONS_ON_START=true
COPY --from=builder /app/public ./public
COPY --from=builder /app/.next/standalone ./
COPY --from=builder /app/.next/static ./.next/static
COPY --from=builder /app/drizzle ./drizzle
COPY --from=builder /app/drizzle.config.ts ./drizzle.config.ts
COPY --from=builder /app/scripts ./scripts
COPY --from=builder /app/lib ./lib
COPY --from=builder /app/tsconfig.json ./tsconfig.json
@@ -42,4 +49,4 @@ EXPOSE 3000
ENV PORT=3000
CMD ["sh", "-c", "if [ \"$WORKFLOW_TARGET_WORLD\" = \"@workflow/world-postgres\" ]; then ./node_modules/.bin/workflow-postgres-setup; fi && ./node_modules/.bin/drizzle-kit migrate --config /app/drizzle.config.ts && bun server.js"]
CMD ["sh", "-c", "bun run bootstrap:prod && bun server.js"]

View File

@@ -81,10 +81,16 @@ On container startup, the app applies Drizzle migrations automatically before la
The app stores SQLite data in Docker volume `fiscal_sqlite_data` (mounted to `/app/data`) and workflow world data in Postgres volume `workflow_postgres_data`.
Container startup runs:
1. `workflow-postgres-setup` (idempotent Workflow world bootstrap)
2. Drizzle migrations for SQLite app tables
2. Programmatic Drizzle migrations for SQLite app tables
3. Next.js server boot
Docker images use Bun (`oven/bun:1.3.5-alpine`) for build and runtime.
Docker builds use BuildKit cache mounts for Bun downloads and `.next/cache`, so repeated server-side builds can reuse dependency and Next/Turbopack caches on the same builder.
Optional runtime toggles:
- `RUN_WORKFLOW_SETUP_ON_START` (default enabled): set to `false` to skip `workflow-postgres-setup` at container boot.
- `RUN_DB_MIGRATIONS_ON_START` (default enabled): set to `false` to skip the SQLite migrations at container boot.
## Coolify deployment
@@ -107,6 +113,9 @@ Operational constraints for Coolify:
- Ensure both named volumes are persisted (`fiscal_sqlite_data`, `workflow_postgres_data`).
- Keep `WORKFLOW_POSTGRES_URL` explicit so Workflow does not fall back to `DATABASE_URL` (SQLite).
- The app `/api/health` probes Workflow backend connectivity and returns non-200 when Workflow world is unavailable.
- Keep `Include Source Commit in Build` disabled so Docker layer cache stays reusable between commits.
- Keep Docker cleanup threshold-based rather than aggressive, otherwise Coolify will discard build cache.
- Keep repeated builds pinned to the same builder/server when possible so Docker layer cache and BuildKit cache mounts remain warm.
Emergency rollback path:
@@ -140,6 +149,8 @@ WORKFLOW_TARGET_WORLD=local
WORKFLOW_POSTGRES_URL=postgres://workflow:workflow@workflow-postgres:5432/workflow
WORKFLOW_POSTGRES_WORKER_CONCURRENCY=10
WORKFLOW_POSTGRES_JOB_PREFIX=fiscal_
RUN_WORKFLOW_SETUP_ON_START=true
RUN_DB_MIGRATIONS_ON_START=true
# Optional local-world fallback
WORKFLOW_LOCAL_DATA_DIR=.workflow-data

View File

@@ -7,6 +7,7 @@
"dev": "bun run scripts/dev.ts",
"dev:next": "bun --bun next dev --turbopack",
"build": "bun --bun next build --turbopack",
"bootstrap:prod": "bun run scripts/bootstrap-production.ts",
"start": "bun --bun next start",
"lint": "bun x tsc --noEmit",
"e2e:prepare": "bun run scripts/e2e-prepare.ts",

View File

@@ -0,0 +1,110 @@
import { spawnSync } from 'node:child_process';
import { mkdirSync } from 'node:fs';
import { dirname } from 'node:path';
import { Database } from 'bun:sqlite';
import { drizzle } from 'drizzle-orm/bun-sqlite';
import { migrate } from 'drizzle-orm/bun-sqlite/migrator';
import { resolveSqlitePath } from './dev-env';
/** Normalize an env value: returns the trimmed string, or undefined when unset or blank. */
function trim(value: string | undefined) {
  if (value === undefined) {
    return undefined;
  }
  const stripped = value.trim();
  return stripped.length > 0 ? stripped : undefined;
}
/**
 * Interpret an env-var toggle that defaults to enabled.
 *
 * Returns false only for an explicit opt-out: the value "false",
 * case-insensitive, with surrounding whitespace ignored. Unset, empty, or any
 * other value keeps the step enabled. The previous comparison was
 * case-sensitive, so `RUN_*=FALSE` (or `False`) silently left the step on.
 */
function shouldRun(value: string | undefined) {
  const normalized = value?.trim().toLowerCase();
  return normalized !== 'false';
}
/** Emit a timestamped bootstrap log line to stdout. */
function log(message: string) {
  const timestamp = new Date().toISOString();
  console.info(`[bootstrap ${timestamp}] ${message}`);
}
/** Render the elapsed time since `startedAt` (a performance.now() reading) as e.g. "12.3ms". */
function formatDuration(startedAt: number) {
  const elapsedMs = performance.now() - startedAt;
  return elapsedMs.toFixed(1) + 'ms';
}
/**
 * Resolve DATABASE_URL to a SQLite file path.
 *
 * Accepts plain paths, `file:relative/path`, and `file:///absolute/path`
 * forms; falls back to `file:data/fiscal.sqlite` when the variable is unset
 * or blank. Throws when the value is empty after stripping the scheme or
 * still looks like a non-file URL (e.g. a Postgres connection string).
 */
function getDatabasePath() {
  const configured = process.env.DATABASE_URL?.trim();
  const raw = configured ? configured : 'file:data/fiscal.sqlite';

  let databasePath = raw;
  if (databasePath.startsWith('file:')) {
    databasePath = databasePath.slice('file:'.length);
  }
  // file:///abs/path leaves a triple slash; keep a single leading slash.
  if (databasePath.startsWith('///')) {
    databasePath = databasePath.slice(2);
  }

  if (!databasePath) {
    throw new Error('DATABASE_URL must point to a SQLite file path.');
  }
  if (databasePath.includes('://')) {
    throw new Error(`DATABASE_URL must resolve to a SQLite file path. Received: ${raw}`);
  }
  return databasePath;
}
/**
 * Run the `workflow-postgres-setup` binary synchronously, inheriting stdio so
 * its output lands in the container logs. Throws when the process cannot be
 * spawned or exits with a non-zero status.
 */
function runWorkflowSetup() {
  const startedAt = performance.now();
  const outcome = spawnSync('./node_modules/.bin/workflow-postgres-setup', [], {
    stdio: 'inherit',
    env: process.env
  });
  if (outcome.error) {
    throw outcome.error;
  }
  if (outcome.status !== 0) {
    const exitCode = outcome.status ?? 'unknown';
    throw new Error(`workflow-postgres-setup failed with exit code ${exitCode}`);
  }
  log(`workflow-postgres-setup completed in ${formatDuration(startedAt)}`);
}
/**
 * Apply Drizzle SQLite migrations from ./drizzle against the database at
 * DATABASE_URL. For file-backed databases the parent directory is created
 * first; foreign-key enforcement is enabled before migrating and the handle
 * is always closed, even on failure.
 */
function runDatabaseMigrations() {
  const startedAt = performance.now();
  const databasePath = getDatabasePath();

  const isFileBacked = databasePath !== ':memory:';
  if (isFileBacked) {
    // resolveSqlitePath mirrors how the app itself locates the database file.
    const normalizedPath = resolveSqlitePath(databasePath);
    mkdirSync(dirname(normalizedPath), { recursive: true });
  }

  const connection = new Database(databasePath, { create: true });
  try {
    connection.exec('PRAGMA foreign_keys = ON;');
    migrate(drizzle(connection), { migrationsFolder: './drizzle' });
  } finally {
    connection.close();
  }
  log(`database migrations completed in ${formatDuration(startedAt)} (${databasePath})`);
}
// Entry point: run each enabled bootstrap step in order, logging timing, and
// exit non-zero on the first failure so the container never starts degraded.
const bootstrapStart = performance.now();
try {
  log('starting production bootstrap');

  const postgresWorldSelected =
    trim(process.env.WORKFLOW_TARGET_WORLD) === '@workflow/world-postgres';
  // Workflow setup only makes sense when the Postgres world is the target.
  if (shouldRun(process.env.RUN_WORKFLOW_SETUP_ON_START) && postgresWorldSelected) {
    runWorkflowSetup();
  } else {
    log('workflow-postgres-setup skipped');
  }

  if (shouldRun(process.env.RUN_DB_MIGRATIONS_ON_START)) {
    runDatabaseMigrations();
  } else {
    log('database migrations skipped');
  }

  log(`production bootstrap completed in ${formatDuration(bootstrapStart)}`);
} catch (error) {
  const reason = error instanceof Error ? error.message : String(error);
  log(`production bootstrap failed after ${formatDuration(bootstrapStart)}: ${reason}`);
  process.exit(1);
}