2 Commits

Author SHA1 Message Date
a2e8fbcf94 Fix search.ts to use normalizeTickerOrNull for optional ticker 2026-03-15 16:06:49 -04:00
5f0abbb007 Consolidate server utilities into shared module
- Add lib/server/utils/normalize.ts with normalizeTicker, normalizeTagsOrNull, nowIso, todayIso
- Add lib/server/utils/validation.ts with asRecord, asBoolean, asStringArray, asEnum
- Add lib/server/utils/index.ts re-exporting all utilities
- Remove duplicate lib/server/utils.ts (old file)
- Update all repos and files to use shared utilities
- Remove redundant ?? '' from normalizeTicker calls
- Update watchlist.ts to use normalizeTagsOrNull for null-return tags
2026-03-15 15:56:16 -04:00
18 changed files with 209 additions and 469 deletions
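
The commit message above names the new shared helpers, and the diffs below delete their per-repo duplicates, so most of the shapes can be read directly off this changeset. A minimal sketch of what lib/server/utils/normalize.ts plausibly exports, reconstructed from those removed bodies — todayIso never appears in any hunk below, so its body is an assumption:

// Sketch of lib/server/utils/normalize.ts, reconstructed from the duplicate
// implementations removed in the diffs below. Bodies marked "assumed" are not
// visible anywhere in this changeset.

// Uppercases and trims a ticker symbol (the duplicate removed from the Yahoo and SEC modules).
export function normalizeTicker(ticker: string): string {
  return ticker.trim().toUpperCase();
}

// Variant adopted by search.ts in the follow-up commit: empty or missing input becomes null.
export function normalizeTickerOrNull(value: string | null | undefined): string | null {
  const normalized = value?.trim().toUpperCase() ?? '';
  return normalized.length > 0 ? normalized : null;
}

// Current timestamp as an ISO-8601 string; replaces the repeated
// new Date().toISOString() calls across the repos.
export function nowIso(): string {
  return new Date().toISOString();
}

// Assumed: the commit message lists todayIso, but no body for it appears in the diff.
export function todayIso(): string {
  return new Date().toISOString().slice(0, 10);
}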

View File

@@ -1,3 +1,5 @@
+import { normalizeTicker } from '@/lib/server/utils';
+
 const YAHOO_BASE = 'https://query1.finance.yahoo.com/v8/finance/chart';
 const QUOTE_CACHE_TTL_MS = 1000 * 60;
 const PRICE_HISTORY_CACHE_TTL_MS = 1000 * 60 * 15;
@@ -27,11 +29,11 @@ const quoteCache = new Map<string, QuoteCacheEntry>();
 const priceHistoryCache = new Map<string, PriceHistoryCacheEntry>();
 
 function buildYahooChartUrl(ticker: string, params: string) {
-  return `${YAHOO_BASE}/${encodeURIComponent(ticker.trim().toUpperCase())}?${params}`;
+  return `${YAHOO_BASE}/${encodeURIComponent(normalizeTicker(ticker))}?${params}`;
 }
 
 export async function getQuote(ticker: string): Promise<QuoteResult> {
-  const normalizedTicker = ticker.trim().toUpperCase();
+  const normalizedTicker = normalizeTicker(ticker);
   const cached = quoteCache.get(normalizedTicker);
   if (cached && cached.expiresAt > Date.now()) {
@@ -101,7 +103,7 @@ export async function getQuoteOrNull(ticker: string): Promise<number | null> {
 }
 
 export async function getHistoricalClosingPrices(ticker: string, dates: string[]) {
-  const normalizedTicker = ticker.trim().toUpperCase();
+  const normalizedTicker = normalizeTicker(ticker);
   const normalizedDates = dates
     .map((value) => {
       const parsed = Date.parse(value);
@@ -169,7 +171,7 @@ export async function getHistoricalClosingPrices(ticker: string, dates: string[]
 }
 
 export async function getPriceHistory(ticker: string): Promise<PriceHistoryResult> {
-  const normalizedTicker = ticker.trim().toUpperCase();
+  const normalizedTicker = normalizeTicker(ticker);
   const cached = priceHistoryCache.get(normalizedTicker);
   if (cached && cached.expiresAt > Date.now()) {

View File

@@ -1,5 +1,6 @@
 import { format } from 'date-fns';
 import type { Filing, RecentDevelopmentItem, RecentDevelopments } from '@/lib/types';
+import { normalizeTicker } from '@/lib/server/utils';
 
 export type RecentDevelopmentSourceContext = {
   filings: Filing[];
@@ -115,9 +116,9 @@ export async function getRecentDevelopments(
     limit?: number;
   }
 ): Promise<RecentDevelopments> {
-  const normalizedTicker = ticker.trim().toUpperCase();
+  const normalizedTicker = normalizeTicker(ticker);
   const limit = options?.limit ?? 6;
-  const cacheKey = `${normalizedTicker}:${context.filings.map((filing) => filing.accession_number).join(',')}`;
+  const cacheKey = `${normalizedTicker ?? ''}:${context.filings.map((filing) => filing.accession_number).join(',')}`;
   const cached = recentDevelopmentsCache.get(cacheKey);
   if (cached && cached.expiresAt > Date.now()) {
@@ -128,7 +129,7 @@
   const itemCollections = await Promise.all(
     sources.map(async (source) => {
       try {
-        return await source.fetch(normalizedTicker, context);
+        return await source.fetch(normalizedTicker ?? '', context);
       } catch {
         return [] satisfies RecentDevelopmentItem[];
       }

View File

@@ -962,8 +962,10 @@ export async function upsertFilingTaxonomySnapshot(input: UpsertFilingTaxonomySn
   const now = new Date().toISOString();
   const normalized = normalizeFilingTaxonomySnapshotPayload(input);
 
-  return db.transaction(async (tx) => {
-    const [saved] = await tx
+  const [saved] = await withFinancialIngestionSchemaRetry({
+    client: getSqliteClient(),
+    context: 'filing-taxonomy-snapshot-upsert',
+    operation: async () => await db
       .insert(filingTaxonomySnapshot)
       .values({
         filing_id: input.filing_id,
@@ -1020,23 +1022,19 @@ export async function upsertFilingTaxonomySnapshot(input: UpsertFilingTaxonomySn
         updated_at: now
       }
     })
-      .returning();
+      .returning()
+  });
 
   const snapshotId = saved.id;
 
-  try {
-    await tx.delete(filingTaxonomyAsset).where(eq(filingTaxonomyAsset.snapshot_id, snapshotId));
-    await tx.delete(filingTaxonomyContext).where(eq(filingTaxonomyContext.snapshot_id, snapshotId));
-    await tx.delete(filingTaxonomyConcept).where(eq(filingTaxonomyConcept.snapshot_id, snapshotId));
-    await tx.delete(filingTaxonomyFact).where(eq(filingTaxonomyFact.snapshot_id, snapshotId));
-    await tx.delete(filingTaxonomyMetricValidation).where(eq(filingTaxonomyMetricValidation.snapshot_id, snapshotId));
-  } catch (error) {
-    throw new Error(`Failed to delete child records for snapshot ${snapshotId}: ${error}`);
-  }
+  await db.delete(filingTaxonomyAsset).where(eq(filingTaxonomyAsset.snapshot_id, snapshotId));
+  await db.delete(filingTaxonomyContext).where(eq(filingTaxonomyContext.snapshot_id, snapshotId));
+  await db.delete(filingTaxonomyConcept).where(eq(filingTaxonomyConcept.snapshot_id, snapshotId));
+  await db.delete(filingTaxonomyFact).where(eq(filingTaxonomyFact.snapshot_id, snapshotId));
+  await db.delete(filingTaxonomyMetricValidation).where(eq(filingTaxonomyMetricValidation.snapshot_id, snapshotId));
 
   if (input.contexts.length > 0) {
-    try {
-      await tx.insert(filingTaxonomyContext).values(input.contexts.map((context) => ({
+    await db.insert(filingTaxonomyContext).values(input.contexts.map((context) => ({
       snapshot_id: snapshotId,
       context_id: context.context_id,
       entity_identifier: context.entity_identifier,
@@ -1048,14 +1046,10 @@ export async function upsertFilingTaxonomySnapshot(input: UpsertFilingTaxonomySn
       scenario_json: context.scenario_json,
       created_at: now
     })));
-    } catch (error) {
-      throw new Error(`Failed to insert ${input.contexts.length} contexts for snapshot ${snapshotId}: ${error}`);
-    }
   }
 
   if (input.assets.length > 0) {
-    try {
-      await tx.insert(filingTaxonomyAsset).values(input.assets.map((asset) => ({
+    await db.insert(filingTaxonomyAsset).values(input.assets.map((asset) => ({
      snapshot_id: snapshotId,
      asset_type: asset.asset_type,
      name: asset.name,
@@ -1065,14 +1059,10 @@ export async function upsertFilingTaxonomySnapshot(input: UpsertFilingTaxonomySn
      is_selected: asset.is_selected,
      created_at: now
    })));
-    } catch (error) {
-      throw new Error(`Failed to insert ${input.assets.length} assets for snapshot ${snapshotId}: ${error}`);
-    }
   }
 
   if (input.concepts.length > 0) {
-    try {
-      await tx.insert(filingTaxonomyConcept).values(input.concepts.map((concept) => ({
+    await db.insert(filingTaxonomyConcept).values(input.concepts.map((concept) => ({
      snapshot_id: snapshotId,
      concept_key: concept.concept_key,
      qname: concept.qname,
@@ -1097,14 +1087,10 @@ export async function upsertFilingTaxonomySnapshot(input: UpsertFilingTaxonomySn
      is_abstract: concept.is_abstract,
      created_at: now
    })));
-    } catch (error) {
-      throw new Error(`Failed to insert ${input.concepts.length} concepts for snapshot ${snapshotId}: ${error}`);
-    }
   }
 
   if (input.facts.length > 0) {
-    try {
-      await tx.insert(filingTaxonomyFact).values(input.facts.map((fact) => ({
+    await db.insert(filingTaxonomyFact).values(input.facts.map((fact) => ({
      snapshot_id: snapshotId,
      concept_key: fact.concept_key,
      qname: fact.qname,
@@ -1133,14 +1119,10 @@ export async function upsertFilingTaxonomySnapshot(input: UpsertFilingTaxonomySn
      source_file: fact.source_file,
      created_at: now
    })));
-    } catch (error) {
-      throw new Error(`Failed to insert ${input.facts.length} facts for snapshot ${snapshotId}: ${error}`);
-    }
   }
 
   if (input.metric_validations.length > 0) {
-    try {
-      await tx.insert(filingTaxonomyMetricValidation).values(input.metric_validations.map((check) => ({
+    await db.insert(filingTaxonomyMetricValidation).values(input.metric_validations.map((check) => ({
      snapshot_id: snapshotId,
      metric_key: check.metric_key,
      taxonomy_value: asNumericText(check.taxonomy_value),
@@ -1156,13 +1138,9 @@ export async function upsertFilingTaxonomySnapshot(input: UpsertFilingTaxonomySn
      created_at: now,
      updated_at: now
    })));
-    } catch (error) {
-      throw new Error(`Failed to insert ${input.metric_validations.length} metric validations for snapshot ${snapshotId}: ${error}`);
-    }
   }
 
   return toSnapshotRecord(saved);
-  });
 }
 
 export async function listFilingTaxonomySnapshotsByTicker(input: {
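
The hunks above drop the db.transaction wrapper: only the snapshot upsert itself is now wrapped in withFinancialIngestionSchemaRetry, while the child-row deletes and inserts run as plain db calls. The helper's definition is not part of this diff; a sketch of the call shape those hunks imply, with the retry policy itself entirely assumed:

// Hypothetical shape inferred from the call site above; the real helper lives
// elsewhere in the repo and its internals are not visible in this changeset.
type SchemaRetryInput<T> = {
  client: unknown;              // passed as getSqliteClient() above; how it is used is not shown here
  context: string;              // log label, e.g. 'filing-taxonomy-snapshot-upsert'
  operation: () => Promise<T>;  // the statement to run (and, on failure, re-run)
};

async function withFinancialIngestionSchemaRetry<T>(input: SchemaRetryInput<T>): Promise<T> {
  try {
    return await input.operation();
  } catch (error) {
    // Assumed: one retry after a schema-related failure; the actual policy is outside this diff.
    console.warn(`[${input.context}] retrying after error: ${error}`);
    return await input.operation();
  }
}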

View File

@@ -2,6 +2,7 @@ import { desc, eq, inArray, max } from 'drizzle-orm';
 import type { Filing } from '@/lib/types';
 import { db } from '@/lib/server/db';
 import { filing, filingLink } from '@/lib/server/db/schema';
+import { normalizeTicker, nowIso } from '@/lib/server/utils';
 
 type FilingRow = typeof filing.$inferSelect;
@@ -90,7 +91,7 @@ export async function getFilingByAccession(accessionNumber: string) {
 export async function listLatestFilingDatesByTickers(tickers: string[]) {
   const normalizedTickers = [...new Set(
     tickers
-      .map((ticker) => ticker.trim().toUpperCase())
+      .map((ticker) => normalizeTicker(ticker))
       .filter((ticker) => ticker.length > 0)
   )];
@@ -121,7 +122,7 @@ export async function upsertFilingsRecords(items: UpsertFilingInput[]) {
   let updated = 0;
 
   for (const item of items) {
-    const now = new Date().toISOString();
+    const now = nowIso();
     const existing = await getFilingByAccession(item.accession_number);
@@ -192,7 +193,7 @@ export async function saveFilingAnalysis(
     .update(filing)
     .set({
       analysis,
-      updated_at: new Date().toISOString()
+      updated_at: nowIso()
     })
     .where(eq(filing.accession_number, accessionNumber))
     .returning();
@@ -208,7 +209,7 @@ export async function updateFilingMetricsById(
     .update(filing)
     .set({
       metrics,
-      updated_at: new Date().toISOString()
+      updated_at: nowIso()
     })
     .where(eq(filing.id, filingId))
     .returning();

View File

@@ -3,6 +3,7 @@ import type { Holding } from '@/lib/types';
 import { recalculateHolding } from '@/lib/server/portfolio';
 import { db } from '@/lib/server/db';
 import { filing, holding, watchlistItem } from '@/lib/server/db/schema';
+import { normalizeTicker, nowIso } from '@/lib/server/utils';
 
 type HoldingRow = typeof holding.$inferSelect;
@@ -30,7 +31,7 @@ function sortByMarketValueDesc(rows: Holding[]) {
 function normalizeHoldingInput(input: { ticker: string; shares: number; avgCost: number; currentPrice: number }) {
   return {
-    ticker: input.ticker.trim().toUpperCase(),
+    ticker: normalizeTicker(input.ticker),
     shares: input.shares.toFixed(6),
     avg_cost: input.avgCost.toFixed(6),
     current_price: input.currentPrice.toFixed(6)
@@ -82,7 +83,7 @@ export async function listUserHoldings(userId: string) {
 }
 
 export async function getHoldingByTicker(userId: string, ticker: string) {
-  const normalizedTicker = ticker.trim().toUpperCase();
+  const normalizedTicker = normalizeTicker(ticker);
   if (!normalizedTicker) {
     return null;
   }
@@ -104,8 +105,8 @@ export async function upsertHoldingRecord(input: {
   currentPrice?: number;
   companyName?: string;
 }) {
-  const ticker = input.ticker.trim().toUpperCase();
-  const now = new Date().toISOString();
+  const ticker = normalizeTicker(input.ticker);
+  const now = nowIso();
 
   const [existing] = await db
     .select()
@@ -251,8 +252,8 @@ export async function updateHoldingByIdRecord(input: {
     shares: shares.toFixed(6),
     avg_cost: avgCost.toFixed(6),
     current_price: currentPrice.toFixed(6),
-    updated_at: new Date().toISOString(),
-    last_price_at: new Date().toISOString()
+    updated_at: nowIso(),
+    last_price_at: nowIso()
   });
 
   const [updated] = await db

View File

@@ -27,6 +27,14 @@ import {
 } from '@/lib/server/db/schema';
 import { getFilingByAccession, listFilingsRecords } from '@/lib/server/repos/filings';
 import { getWatchlistItemByTicker } from '@/lib/server/repos/watchlist';
+import {
+  normalizeTicker,
+  normalizeTags,
+  normalizeOptionalString,
+  normalizeRecord,
+  normalizePositiveInteger,
+  nowIso
+} from '@/lib/server/utils';
 
 type ResearchArtifactRow = typeof researchArtifact.$inferSelect;
 type ResearchMemoRow = typeof researchMemo.$inferSelect;
@@ -51,55 +59,6 @@ const RESEARCH_PACKET_SECTION_TITLES: Record<ResearchMemoSection, string> = {
   next_actions: 'Next Actions'
 };
 
-function normalizeTicker(ticker: string) {
-  return ticker.trim().toUpperCase();
-}
-
-function normalizeOptionalString(value?: string | null) {
-  const normalized = value?.trim();
-  return normalized ? normalized : null;
-}
-
-function normalizePositiveInteger(value?: number | null) {
-  if (value === null || value === undefined || !Number.isFinite(value)) {
-    return null;
-  }
-  const normalized = Math.trunc(value);
-  return normalized > 0 ? normalized : null;
-}
-
-function normalizeRecord(value?: Record<string, unknown> | null) {
-  if (!value || typeof value !== 'object' || Array.isArray(value)) {
-    return null;
-  }
-  return value;
-}
-
-function normalizeTags(tags?: string[] | null) {
-  if (!Array.isArray(tags)) {
-    return [];
-  }
-  const unique = new Set<string>();
-  for (const entry of tags) {
-    if (typeof entry !== 'string') {
-      continue;
-    }
-    const normalized = entry.trim();
-    if (!normalized) {
-      continue;
-    }
-    unique.add(normalized);
-  }
-  return [...unique];
-}
-
 function buildArtifactSearchText(input: {
   title?: string | null;
   summary?: string | null;
@@ -409,7 +368,7 @@ export async function createResearchArtifactRecord(input: {
     throw new Error('ticker is required');
   }
 
-  const now = new Date().toISOString();
+  const now = nowIso();
   const title = normalizeOptionalString(input.title);
   const summary = normalizeOptionalString(input.summary);
   const bodyMarkdown = normalizeOptionalString(input.bodyMarkdown);
@@ -520,7 +479,7 @@ export async function upsertSystemResearchArtifact(input: {
       search_text: searchText,
       tags: normalizeTags(input.tags),
       metadata: normalizeRecord(input.metadata),
-      updated_at: new Date().toISOString()
+      updated_at: nowIso()
     })
     .where(eq(researchArtifact.id, existing.id))
     .returning();
@@ -566,7 +525,7 @@ export async function updateResearchArtifactRecord(input: {
      metadata,
      tags,
      search_text: searchText,
-      updated_at: new Date().toISOString()
+      updated_at: nowIso()
    })
    .where(eq(researchArtifact.id, input.id))
    .returning();
@@ -702,7 +661,7 @@ export async function upsertResearchMemoRecord(input: {
     throw new Error('ticker is required');
   }
 
-  const now = new Date().toISOString();
+  const now = nowIso();
   const existing = await getResearchMemoByTicker(input.userId, ticker);
 
   if (!existing) {
@@ -796,7 +755,7 @@ export async function addResearchMemoEvidenceLink(input: {
        .then((rows) => (rows[0]?.maxOrder ?? 0) + 1)
      : Math.max(0, Math.trunc(input.sortOrder));
 
-  const now = new Date().toISOString();
+  const now = nowIso();
 
   if (existing.length > 0) {
     await db
       .update(researchMemoEvidence)
@@ -894,7 +853,7 @@ export async function getResearchPacket(userId: string, ticker: string): Promise
   return {
     ticker: normalizedTicker,
     companyName: coverage?.company_name ?? latestFiling?.company_name ?? null,
-    generated_at: new Date().toISOString(),
+    generated_at: nowIso(),
     memo,
     sections: toPacketSections(memo, evidenceBySection)
   };

View File

@@ -3,6 +3,7 @@ import type { Task, TaskStage, TaskStageContext, TaskStageEvent, TaskStatus, Tas
 import { db } from '@/lib/server/db';
 import { taskRun, taskStageEvent } from '@/lib/server/db/schema';
 import { buildTaskNotification } from '@/lib/server/task-notifications';
+import { nowIso } from '@/lib/server/utils';
 
 type TaskRow = typeof taskRun.$inferSelect;
 type TaskStageEventRow = typeof taskStageEvent.$inferSelect;
@@ -110,7 +111,7 @@ async function insertTaskStageEvent(executor: InsertExecutor, input: EventInsert
 }
 
 export async function createTaskRunRecord(input: CreateTaskInput) {
-  const now = new Date().toISOString();
+  const now = nowIso();
 
   return await db.transaction(async (tx) => {
     const [row] = await tx
@@ -219,7 +220,7 @@ async function attemptAtomicInsert(
 }
 
 export async function createTaskRunRecordAtomic(input: CreateTaskInput): Promise<AtomicCreateResult> {
-  const now = new Date().toISOString();
+  const now = nowIso();
 
   return await db.transaction(async (tx) => {
     const result = await attemptAtomicInsert(tx, input, now);
@@ -235,7 +236,7 @@ export async function setTaskWorkflowRunId(taskId: string, workflowRunId: string
     .update(taskRun)
     .set({
       workflow_run_id: workflowRunId,
-      updated_at: new Date().toISOString()
+      updated_at: nowIso()
     })
     .where(eq(taskRun.id, taskId));
 }
@@ -313,7 +314,7 @@ export async function findInFlightTaskByResourceKey(
 }
 
 export async function markTaskRunning(taskId: string) {
-  const now = new Date().toISOString();
+  const now = nowIso();
 
   return await db.transaction(async (tx) => {
     const [row] = await tx
@@ -354,7 +355,7 @@ export async function updateTaskStage(
   detail: string | null = null,
   context: TaskStageContext | null = null
 ) {
-  const now = new Date().toISOString();
+  const now = nowIso();
 
   return await db.transaction(async (tx) => {
     const [current] = await tx
@@ -403,7 +404,7 @@ export async function completeTask(
   result: Record<string, unknown>,
   completion: TaskCompletionState = {}
 ) {
-  const now = new Date().toISOString();
+  const now = nowIso();
 
   return await db.transaction(async (tx) => {
     const [row] = await tx
@@ -445,7 +446,7 @@ export async function markTaskFailure(
   stage: TaskStage = 'failed',
   failure: TaskCompletionState = {}
 ) {
-  const now = new Date().toISOString();
+  const now = nowIso();
 
   return await db.transaction(async (tx) => {
     const [row] = await tx
@@ -513,7 +514,7 @@ export async function setTaskStatusFromWorkflow(
     return toTask(current);
   }
 
-  const now = new Date().toISOString();
+  const now = nowIso();
   const [row] = await tx
     .update(taskRun)
     .set({
@@ -551,7 +552,7 @@ export async function updateTaskNotificationState(
   userId: string,
   input: UpdateTaskNotificationStateInput
 ) {
-  const now = new Date().toISOString();
+  const now = nowIso();
   const patch: Partial<typeof taskRun.$inferInsert> = {
     updated_at: now
   };

View File

@@ -6,38 +6,12 @@ import type {
 } from '@/lib/types';
 import { db } from '@/lib/server/db';
 import { watchlistItem } from '@/lib/server/db/schema';
+import { normalizeTicker, normalizeTagsOrNull, nowIso } from '@/lib/server/utils';
 
 type WatchlistRow = typeof watchlistItem.$inferSelect;
 
 const DEFAULT_STATUS: CoverageStatus = 'backlog';
 const DEFAULT_PRIORITY: CoveragePriority = 'medium';
 
-function normalizeTags(tags?: string[]) {
-  if (!Array.isArray(tags)) {
-    return null;
-  }
-  const unique = new Set<string>();
-  for (const entry of tags) {
-    if (typeof entry !== 'string') {
-      continue;
-    }
-    const tag = entry.trim();
-    if (!tag) {
-      continue;
-    }
-    unique.add(tag);
-  }
-  if (unique.size === 0) {
-    return null;
-  }
-  return [...unique];
-}
-
 function toWatchlistItem(row: WatchlistRow, latestFilingDate: string | null = null): WatchlistItem {
   return {
     id: row.id,
@@ -79,7 +53,7 @@ export async function getWatchlistItemById(userId: string, id: number) {
 }
 
 export async function getWatchlistItemByTicker(userId: string, ticker: string) {
-  const normalizedTicker = ticker.trim().toUpperCase();
+  const normalizedTicker = normalizeTicker(ticker);
   if (!normalizedTicker) {
     return null;
   }
@@ -104,15 +78,15 @@ export async function upsertWatchlistItemRecord(input: {
   priority?: CoveragePriority;
   lastReviewedAt?: string | null;
 }) {
-  const normalizedTicker = input.ticker.trim().toUpperCase();
+  const normalizedTicker = normalizeTicker(input.ticker);
   const normalizedSector = input.sector?.trim() ? input.sector.trim() : null;
   const normalizedCategory = input.category?.trim() ? input.category.trim() : null;
-  const normalizedTags = normalizeTags(input.tags);
+  const normalizedTags = normalizeTagsOrNull(input.tags);
   const normalizedCompanyName = input.companyName.trim();
   const normalizedLastReviewedAt = input.lastReviewedAt?.trim()
     ? input.lastReviewedAt.trim()
     : null;
-  const now = new Date().toISOString();
+  const timestamp = nowIso();
 
   const [inserted] = await db
     .insert(watchlistItem)
@@ -125,8 +99,8 @@ export async function upsertWatchlistItemRecord(input: {
       tags: normalizedTags,
       status: input.status ?? DEFAULT_STATUS,
       priority: input.priority ?? DEFAULT_PRIORITY,
-      created_at: now,
-      updated_at: now,
+      created_at: timestamp,
+      updated_at: timestamp,
       last_reviewed_at: normalizedLastReviewedAt
     })
     .onConflictDoNothing({
@@ -156,7 +130,7 @@ export async function upsertWatchlistItemRecord(input: {
      tags: normalizedTags,
      status: input.status ?? existing?.status ?? DEFAULT_STATUS,
      priority: input.priority ?? existing?.priority ?? DEFAULT_PRIORITY,
-      updated_at: now,
+      updated_at: timestamp,
      last_reviewed_at: normalizedLastReviewedAt ?? existing?.last_reviewed_at ?? null
    })
    .where(and(eq(watchlistItem.user_id, input.userId), eq(watchlistItem.ticker, normalizedTicker)))
@@ -212,7 +186,7 @@ export async function updateWatchlistItemRecord(input: {
     : null;
   const nextTags = input.tags === undefined
     ? existing.tags ?? null
-    : normalizeTags(input.tags);
+    : normalizeTagsOrNull(input.tags);
   const nextLastReviewedAt = input.lastReviewedAt === undefined
     ? existing.last_reviewed_at
     : input.lastReviewedAt?.trim()
@@ -228,7 +202,7 @@ export async function updateWatchlistItemRecord(input: {
      tags: nextTags,
      status: input.status ?? existing.status ?? DEFAULT_STATUS,
      priority: input.priority ?? existing.priority ?? DEFAULT_PRIORITY,
-      updated_at: new Date().toISOString(),
+      updated_at: nowIso(),
      last_reviewed_at: nextLastReviewedAt
    })
    .where(and(eq(watchlistItem.user_id, input.userId), eq(watchlistItem.id, input.id)))
@@ -242,7 +216,7 @@ export async function updateWatchlistReviewByTicker(
   ticker: string,
   reviewedAt: string
 ) {
-  const normalizedTicker = ticker.trim().toUpperCase();
+  const normalizedTicker = normalizeTicker(ticker);
   if (!normalizedTicker) {
     return null;
   }
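
Note that both tag helpers now come from the shared module: the research repo imports normalizeTags (empty array on bad input), while watchlist.ts switches to normalizeTagsOrNull so that "no tags" stays NULL on the row, per the commit message. Reconstructed from the two duplicate bodies removed in these diffs, the pair plausibly reads:

// Sketch reconstructed from the removed duplicates; both dedupe and trim,
// they differ only in what an empty or non-array input becomes.
export function normalizeTags(tags?: string[] | null): string[] {
  if (!Array.isArray(tags)) {
    return [];
  }
  const unique = new Set<string>();
  for (const entry of tags) {
    if (typeof entry !== 'string') {
      continue;
    }
    const tag = entry.trim();
    if (tag) {
      unique.add(tag);
    }
  }
  return [...unique];
}

// Null-returning variant adopted by watchlist.ts above.
export function normalizeTagsOrNull(tags?: string[] | null): string[] | null {
  const normalized = normalizeTags(tags);
  return normalized.length > 0 ? normalized : null;
}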

View File

@@ -17,6 +17,7 @@ import {
   listResearchJournalEntries,
   listResearchJournalEntriesForUser
 } from '@/lib/server/repos/research-journal';
+import { normalizeTickerOrNull } from '@/lib/server/utils';
 
 type SearchDocumentScope = 'global' | 'user';
 type SearchDocumentSourceKind = 'filing_document' | 'filing_brief' | 'research_note';
@@ -131,11 +132,6 @@ function escapeLike(value: string) {
   return value.replace(/[%_]/g, (match) => `\\${match}`);
 }
 
-function normalizeTicker(value: string | null | undefined) {
-  const normalized = value?.trim().toUpperCase() ?? '';
-  return normalized.length > 0 ? normalized : null;
-}
-
 function normalizeSearchSources(sources?: SearchSource[]) {
   const normalized = new Set<SearchSource>();
@@ -1205,7 +1201,7 @@ export async function searchKnowledgeBase(input: SearchInput) {
   }
 
   const limit = clampLimit(input.limit);
-  const normalizedTicker = normalizeTicker(input.ticker);
+  const normalizedTicker = normalizeTickerOrNull(input.ticker);
   const includedSources = normalizeSearchSources(input.sources);
   const client = getSqliteClient();
   const [queryEmbedding] = await runAiEmbeddings([normalizedQuery]);

View File

@@ -1,4 +1,5 @@
 import type { CompanyProfile, CompanyValuationSnapshot } from '@/lib/types';
+import { normalizeTicker } from '@/lib/server/utils';
 
 type FetchImpl = typeof fetch;
@@ -261,7 +262,7 @@
   ticker: string,
   options?: { fetchImpl?: FetchImpl }
 ): Promise<SecCompanyProfileResult | null> {
-  const normalizedTicker = ticker.trim().toUpperCase();
+  const normalizedTicker = normalizeTicker(ticker);
   if (!normalizedTicker) {
     return null;
   }

View File

@@ -39,6 +39,7 @@ import {
 } from '@/lib/server/sec';
 import { enqueueTask } from '@/lib/server/tasks';
 import { hydrateFilingTaxonomySnapshot } from '@/lib/server/taxonomy/engine';
+import { nowIso } from '@/lib/server/utils';
 
 const EXTRACTION_REQUIRED_KEYS = [
   'summary',
@@ -762,7 +763,7 @@ async function processSyncFilings(task: Task) {
       await deleteCompanyFinancialBundlesForTicker(filing.ticker);
       taxonomySnapshotsHydrated += 1;
     } catch (error) {
-      const now = new Date().toISOString();
+      const now = nowIso();
       await upsertFilingTaxonomySnapshot({
         filing_id: filing.id,
         ticker: filing.ticker,
@@ -961,7 +962,7 @@ async function processRefreshPrices(task: Task) {
       }
     }
   );
-  const updatedCount = await applyRefreshedPrices(userId, quotes, new Date().toISOString());
+  const updatedCount = await applyRefreshedPrices(userId, quotes, nowIso());
 
   const result = {
     updatedCount,

View File

@@ -49,9 +49,6 @@ function createHydrationResult(): TaxonomyHydrationResult {
       unmapped_row_count: 0,
       material_unmapped_row_count: 0,
       warnings: ['rust_warning']
-    },
-    xbrl_validation: {
-      status: 'passed'
     }
   };
 }

View File

@@ -1,7 +1,6 @@
 import { existsSync } from 'node:fs';
 import { join } from 'node:path';
 import type { TaxonomyHydrationInput, TaxonomyHydrationResult } from '@/lib/server/taxonomy/types';
-import { withRetry } from '@/lib/server/utils/retry';
 
 function candidateBinaryPaths() {
   return [
@@ -24,10 +23,6 @@ export function resolveFiscalXbrlBinary() {
 export async function hydrateFilingTaxonomySnapshotFromSidecar(
   input: TaxonomyHydrationInput
 ): Promise<TaxonomyHydrationResult> {
-  return withRetry(() => hydrateFromSidecarImpl(input));
-}
-
-async function hydrateFromSidecarImpl(input: TaxonomyHydrationInput): Promise<TaxonomyHydrationResult> {
   const binary = resolveFiscalXbrlBinary();
   const timeoutMs = Math.max(Number(process.env.XBRL_ENGINE_TIMEOUT_MS ?? 45_000), 1_000);
   const command = [binary, 'hydrate-filing'];

View File

@@ -204,11 +204,6 @@ export type TaxonomyHydrationNormalizationSummary = {
   warnings: string[];
 };
 
-export type XbrlValidationResult = {
-  status: 'passed' | 'warning' | 'error';
-  message?: string;
-};
-
 export type TaxonomyHydrationInput = {
   filingId: number;
   ticker: string;
@@ -284,5 +279,4 @@ export type TaxonomyHydrationResult = {
   }>;
   metric_validations: TaxonomyMetricValidationCheck[];
   normalization_summary: TaxonomyHydrationNormalizationSummary;
-  xbrl_validation: XbrlValidationResult;
 };

View File

@@ -18,5 +18,3 @@ export {
   asStringArray,
   asEnum
 } from './validation';
-
-export { withRetry, type RetryOptions } from './retry';

View File

@@ -1,59 +0,0 @@
-export interface RetryOptions {
-  maxRetries: number;
-  baseDelayMs: number;
-  maxDelayMs: number;
-  jitterFactor: number;
-  retryableErrors: RegExp[];
-}
-
-const DEFAULT_RETRY_OPTIONS: RetryOptions = {
-  maxRetries: 3,
-  baseDelayMs: 2000,
-  maxDelayMs: 10000,
-  jitterFactor: 0.3,
-  retryableErrors: [
-    /timeout/i,
-    /ECONNRESET/,
-    /ETIMEDOUT/,
-    /ENOTFOUND/,
-    /exit code 1/,
-    /signal/,
-    /killed/
-  ]
-};
-
-export async function withRetry<T>(
-  fn: () => Promise<T>,
-  options?: Partial<RetryOptions>
-): Promise<T> {
-  const opts = { ...DEFAULT_RETRY_OPTIONS, ...options };
-  let lastError: Error | null = null;
-
-  for (let attempt = 0; attempt < opts.maxRetries; attempt++) {
-    try {
-      return await fn();
-    } catch (error) {
-      lastError = error instanceof Error ? error : new Error(String(error));
-      const isRetryable = opts.retryableErrors.some(
-        (pattern) => pattern.test(lastError!.message)
-      );
-      if (!isRetryable || attempt === opts.maxRetries - 1) {
-        throw lastError;
-      }
-      const baseDelay = opts.baseDelayMs * Math.pow(2, attempt);
-      const jitter = Math.random() * opts.jitterFactor * baseDelay;
-      const delay = Math.min(baseDelay + jitter, opts.maxDelayMs);
-      console.warn(
-        `[retry] Attempt ${attempt + 1}/${opts.maxRetries} failed, retrying in ${Math.round(delay)}ms: ${lastError.message}`
-      );
-      await Bun.sleep(delay);
-    }
-  }
-
-  throw lastError;
-}

View File

@@ -1,3 +1,5 @@
+import { normalizeTicker } from '@/lib/server/utils';
+
 type FetchImpl = typeof fetch;
 
 type CacheEntry<T> = {
@@ -142,7 +144,7 @@ export async function getYahooCompanyDescription(
   ticker: string,
   options?: { fetchImpl?: FetchImpl }
 ) {
-  const normalizedTicker = ticker.trim().toUpperCase();
+  const normalizedTicker = normalizeTicker(ticker);
   if (!normalizedTicker) {
     return null;
   }

View File

@@ -4,8 +4,6 @@ use regex::Regex;
 use reqwest::blocking::Client;
 use serde::{Deserialize, Serialize};
 use std::collections::{BTreeMap, HashMap, HashSet};
-use std::sync::Mutex;
-use std::time::{Duration, Instant};
 
 mod kpi_mapper;
 mod metrics;
@@ -22,39 +20,6 @@ use crabrl as _;
 pub const PARSER_ENGINE: &str = "fiscal-xbrl";
 pub const PARSER_VERSION: &str = env!("CARGO_PKG_VERSION");
 
-const DEFAULT_SEC_RATE_LIMIT_MS: u64 = 100;
-const HTTP_TIMEOUT_SECS: u64 = 30;
-
-static RATE_LIMITER: Lazy<Mutex<Instant>> = Lazy::new(|| Mutex::new(Instant::now()));
-
-fn sec_rate_limit_delay() -> u64 {
-    std::env::var("SEC_RATE_LIMIT_MS")
-        .ok()
-        .and_then(|s| s.parse().ok())
-        .unwrap_or(DEFAULT_SEC_RATE_LIMIT_MS)
-}
-
-fn rate_limited_fetch<T, F>(fetch_fn: F) -> Result<T>
-where
-    F: FnOnce() -> Result<T>,
-{
-    let delay_ms = sec_rate_limit_delay();
-    {
-        let mut last_request = RATE_LIMITER.lock().unwrap();
-        let elapsed = last_request.elapsed();
-        let min_delay = Duration::from_millis(delay_ms);
-        if elapsed < min_delay {
-            std::thread::sleep(min_delay - elapsed);
-        }
-        *last_request = Instant::now();
-    }
-    fetch_fn()
-}
-
 static CONTEXT_RE: Lazy<Regex> = Lazy::new(|| {
     Regex::new(r#"(?is)<(?:[a-z0-9_\-]+:)?context\b[^>]*\bid=["']([^"']+)["'][^>]*>(.*?)</(?:[a-z0-9_\-]+:)?context>"#).unwrap()
 });
@@ -153,7 +118,6 @@ pub struct HydrateFilingResponse {
     pub contexts: Vec<ContextOutput>,
     pub derived_metrics: FilingMetrics,
     pub validation_result: ValidationResultOutput,
-    pub xbrl_validation: XbrlValidationResult,
     pub facts_count: usize,
     pub concepts_count: usize,
     pub dimensions_count: usize,
@@ -517,7 +481,6 @@ struct PresentationNode {
 pub fn hydrate_filing(input: HydrateFilingRequest) -> Result<HydrateFilingResponse> {
     let client = Client::builder()
         .user_agent("Fiscal Clone <support@fiscal.local>")
-        .timeout(Duration::from_secs(120))
        .build()
        .context("unable to build HTTP client")?;
@@ -558,11 +521,7 @@ pub fn hydrate_filing(input: HydrateFilingRequest) -> Result<HydrateFilingRespon
             computed_definitions: vec![],
             contexts: vec![],
             derived_metrics: FilingMetrics::default(),
-            validation_result: validation_result.clone(),
-            xbrl_validation: XbrlValidationResult {
-                status: "error".to_string(),
-                message: Some("No XBRL instance found".to_string()),
-            },
+            validation_result,
             facts_count: 0,
             concepts_count: 0,
             dimensions_count: 0,
@@ -583,17 +542,6 @@ pub fn hydrate_filing(input: HydrateFilingRequest) -> Result<HydrateFilingRespon
     let instance_text = fetch_text(&client, &instance_asset.url)
         .context("fetch request failed for XBRL instance")?;
 
-    let xbrl_validation = validate_xbrl_structure(&instance_text, Some(&instance_asset.name));
-    if xbrl_validation.status != "passed" {
-        eprintln!(
-            "[xbrl] Validation {} for {:?}: {}",
-            xbrl_validation.status,
-            instance_asset.name,
-            xbrl_validation.message.as_deref().unwrap_or("unknown")
-        );
-    }
-
     let parsed_instance = parse_xbrl_instance(&instance_text, Some(instance_asset.name.clone()));
 
     let mut label_by_concept = HashMap::new();
@@ -730,7 +678,6 @@ pub fn hydrate_filing(input: HydrateFilingRequest) -> Result<HydrateFilingRespon
         contexts: parsed_instance.contexts,
         derived_metrics: metrics::derive_metrics(&facts),
         validation_result,
-        xbrl_validation,
         facts_count: facts.len(),
         concepts_count: concepts.len(),
         dimensions_count: facts
@@ -1016,10 +963,8 @@ fn parse_size(value: Option<&serde_json::Value>) -> Option<i64> {
 }
 
 fn fetch_text(client: &Client, url: &str) -> Result<String> {
-    rate_limited_fetch(|| {
     let response = client
         .get(url)
-        .timeout(Duration::from_secs(HTTP_TIMEOUT_SECS))
         .send()
         .with_context(|| format!("request failed for {url}"))?;
     if !response.status().is_success() {
@@ -1028,14 +973,11 @@ fn fetch_text(client: &Client, url: &str) -> Result<String> {
     response
         .text()
         .with_context(|| format!("unable to read response body for {url}"))
-    })
 }
 
 fn fetch_json<T: for<'de> Deserialize<'de>>(client: &Client, url: &str) -> Result<T> {
-    rate_limited_fetch(|| {
     let response = client
         .get(url)
-        .timeout(Duration::from_secs(HTTP_TIMEOUT_SECS))
         .send()
         .with_context(|| format!("request failed for {url}"))?;
     if !response.status().is_success() {
@@ -1044,50 +986,6 @@ fn fetch_json<T: for<'de> Deserialize<'de>>(client: &Client, url: &str) -> Resul
     response
         .json::<T>()
         .with_context(|| format!("unable to parse JSON response for {url}"))
-    })
 }
 
-#[derive(Debug, Clone, Serialize)]
-pub struct XbrlValidationResult {
-    pub status: String,
-    #[serde(skip_serializing_if = "Option::is_none")]
-    pub message: Option<String>,
-}
-
-fn validate_xbrl_structure(xml: &str, source_file: Option<&str>) -> XbrlValidationResult {
-    if xml.is_empty() {
-        return XbrlValidationResult {
-            status: "error".to_string(),
-            message: Some("XBRL content is empty".to_string()),
-        };
-    }
-
-    if !xml.contains("<xbrl") && !xml.contains("<xbrli:xbrl") {
-        return XbrlValidationResult {
-            status: "error".to_string(),
-            message: Some("Invalid XBRL: missing root element".to_string()),
-        };
-    }
-
-    let open_count = xml.matches('<').count();
-    let close_count = xml.matches('>').count();
-    if open_count != close_count {
-        return XbrlValidationResult {
-            status: "warning".to_string(),
-            message: Some(format!(
-                "Malformed XML detected in {:?} ({} open, {} close tags)",
-                source_file.unwrap_or("unknown"),
-                open_count,
-                close_count
-            )),
-        };
-    }
-
-    XbrlValidationResult {
-        status: "passed".to_string(),
-        message: None,
-    }
-}
-
 struct ParsedInstance {