238 lines
5.9 KiB
TypeScript
238 lines
5.9 KiB
TypeScript
import { createOpenAI } from '@ai-sdk/openai';
|
|
import { generateText } from 'ai';
|
|
import { createZhipu } from 'zhipu-ai-provider';
|
|
|
|
// Which AI task a request belongs to; selects the provider configuration.
type AiWorkload = 'report' | 'extraction';

// Supported backends: Zhipu's hosted Coding API or a local Ollama server.
type AiProvider = 'zhipu' | 'ollama';

// Fully-resolved settings needed to construct and call a model.
type AiConfig = {
  provider: AiProvider;
  // Absent for Zhipu when ZHIPU_API_KEY is unset (triggers local fallback mode).
  apiKey?: string;
  baseUrl: string;
  model: string;
  // Clamped to [0, 2] by parseTemperature for the report workload; 0 for extraction.
  temperature: number;
};

// Environment-variable lookup source; defaults to process.env in the getters.
type EnvSource = Record<string, string | undefined>;
|
|
|
|
// Options accepted by the config getters; both fields are injectable for tests.
type GetAiConfigOptions = {
  // Environment override; defaults to process.env.
  env?: EnvSource;
  // Sink for one-time warnings; defaults to console.warn.
  warn?: (message: string) => void;
};

// Arguments forwarded to the text-generation hook.
type AiGenerateInput = {
  // Provider model handle; opaque here (typed by the AI SDK at the call site).
  model: unknown;
  system?: string;
  prompt: string;
  temperature: number;
};

// Minimal result surface consumed from the text-generation hook.
type AiGenerateOutput = {
  text: string;
};

// runAiAnalysis options: config overrides plus injectable model/generation hooks.
type RunAiAnalysisOptions = GetAiConfigOptions & {
  // Defaults to 'report' when omitted.
  workload?: AiWorkload;
  // Override model construction (e.g. to stub out providers in tests).
  createModel?: (config: AiConfig) => unknown;
  // Override the generation call itself.
  generate?: (input: AiGenerateInput) => Promise<AiGenerateOutput>;
};
|
|
|
|
// Zhipu Coding API endpoint; intentionally hardcoded (ZHIPU_BASE_URL is ignored,
// see warnIgnoredZhipuBaseUrl).
const CODING_API_BASE_URL = 'https://api.z.ai/api/coding/paas/v4';
// Defaults for the local Ollama backend used by the extraction workload.
const OLLAMA_BASE_URL = 'http://127.0.0.1:11434';
const OLLAMA_MODEL = 'qwen3:8b';
// Placeholder API key passed to the OpenAI-compatible client when none is configured.
const OLLAMA_API_KEY = 'ollama';

// One-shot guard so the ZHIPU_BASE_URL warning is emitted at most once per process.
let warnedIgnoredZhipuBaseUrl = false;
|
|
|
|
function envValue(name: string, env: EnvSource = process.env) {
|
|
const value = env[name];
|
|
if (!value) {
|
|
return undefined;
|
|
}
|
|
|
|
const trimmed = value.trim();
|
|
return trimmed.length > 0 ? trimmed : undefined;
|
|
}
|
|
|
|
function parseTemperature(value: string | undefined) {
|
|
const parsed = Number(value);
|
|
if (!Number.isFinite(parsed)) {
|
|
return 0.2;
|
|
}
|
|
|
|
return Math.min(Math.max(parsed, 0), 2);
|
|
}
|
|
|
|
function warnIgnoredZhipuBaseUrl(env: EnvSource, warn: (message: string) => void) {
|
|
if (warnedIgnoredZhipuBaseUrl) {
|
|
return;
|
|
}
|
|
|
|
const configuredBaseUrl = envValue('ZHIPU_BASE_URL', env);
|
|
if (!configuredBaseUrl) {
|
|
return;
|
|
}
|
|
|
|
warnedIgnoredZhipuBaseUrl = true;
|
|
warn(
|
|
`[AI SDK] ZHIPU_BASE_URL is ignored. The Coding API endpoint is hardcoded to ${CODING_API_BASE_URL}.`
|
|
);
|
|
}
|
|
|
|
function fallbackResponse(prompt: string) {
|
|
const clipped = prompt.split('\n').slice(0, 6).join(' ').slice(0, 260);
|
|
|
|
return [
|
|
'AI SDK fallback mode is active (live model configuration is missing or unavailable).',
|
|
'Thesis: Portfolio remains analyzable with local heuristics until live model access is configured.',
|
|
'Risk scan: Concentration and filing sentiment should be monitored after each sync cycle.',
|
|
`Context digest: ${clipped}`
|
|
].join('\n\n');
|
|
}
|
|
|
|
function toOpenAiCompatibleBaseUrl(baseUrl: string) {
|
|
const normalized = baseUrl.endsWith('/')
|
|
? baseUrl.slice(0, -1)
|
|
: baseUrl;
|
|
|
|
return normalized.endsWith('/v1')
|
|
? normalized
|
|
: `${normalized}/v1`;
|
|
}
|
|
|
|
function asErrorMessage(error: unknown) {
|
|
if (error instanceof Error && error.message) {
|
|
return error.message;
|
|
}
|
|
|
|
return String(error);
|
|
}
|
|
|
|
function defaultCreateModel(config: AiConfig) {
|
|
if (config.provider === 'zhipu') {
|
|
const zhipu = createZhipu({
|
|
apiKey: config.apiKey,
|
|
baseURL: config.baseUrl
|
|
});
|
|
|
|
return zhipu(config.model);
|
|
}
|
|
|
|
const openai = createOpenAI({
|
|
apiKey: config.apiKey ?? OLLAMA_API_KEY,
|
|
baseURL: toOpenAiCompatibleBaseUrl(config.baseUrl)
|
|
});
|
|
|
|
return openai.chat(config.model);
|
|
}
|
|
|
|
async function defaultGenerate(input: AiGenerateInput): Promise<AiGenerateOutput> {
|
|
const result = await generateText({
|
|
model: input.model as never,
|
|
system: input.system,
|
|
prompt: input.prompt,
|
|
temperature: input.temperature
|
|
});
|
|
|
|
return { text: result.text };
|
|
}
|
|
|
|
/**
 * Backward-compatible alias for {@link getReportAiConfig}.
 */
export function getAiConfig(options?: GetAiConfigOptions) {
  return getReportAiConfig(options);
}
|
|
|
|
export function getReportAiConfig(options?: GetAiConfigOptions) {
|
|
const env = options?.env ?? process.env;
|
|
warnIgnoredZhipuBaseUrl(env, options?.warn ?? console.warn);
|
|
|
|
return {
|
|
provider: 'zhipu',
|
|
apiKey: envValue('ZHIPU_API_KEY', env),
|
|
baseUrl: CODING_API_BASE_URL,
|
|
model: envValue('ZHIPU_MODEL', env) ?? 'glm-4.7-flashx',
|
|
temperature: parseTemperature(envValue('AI_TEMPERATURE', env))
|
|
} satisfies AiConfig;
|
|
}
|
|
|
|
export function getExtractionAiConfig(options?: GetAiConfigOptions) {
|
|
const env = options?.env ?? process.env;
|
|
|
|
return {
|
|
provider: 'ollama',
|
|
apiKey: envValue('OLLAMA_API_KEY', env) ?? OLLAMA_API_KEY,
|
|
baseUrl: envValue('OLLAMA_BASE_URL', env) ?? OLLAMA_BASE_URL,
|
|
model: envValue('OLLAMA_MODEL', env) ?? OLLAMA_MODEL,
|
|
temperature: 0
|
|
} satisfies AiConfig;
|
|
}
|
|
|
|
export function isAiConfigured(options?: GetAiConfigOptions) {
|
|
const config = getReportAiConfig(options);
|
|
return Boolean(config.apiKey);
|
|
}
|
|
|
|
export async function runAiAnalysis(prompt: string, systemPrompt?: string, options?: RunAiAnalysisOptions) {
|
|
const workload = options?.workload ?? 'report';
|
|
const config = workload === 'extraction'
|
|
? getExtractionAiConfig(options)
|
|
: getReportAiConfig(options);
|
|
|
|
if (workload === 'report' && !config.apiKey) {
|
|
return {
|
|
provider: 'local-fallback',
|
|
model: config.model,
|
|
text: fallbackResponse(prompt)
|
|
};
|
|
}
|
|
|
|
const createModel = options?.createModel ?? defaultCreateModel;
|
|
const generate = options?.generate ?? defaultGenerate;
|
|
const warn = options?.warn ?? console.warn;
|
|
|
|
try {
|
|
const model = createModel(config);
|
|
|
|
const result = await generate({
|
|
model,
|
|
system: systemPrompt,
|
|
prompt,
|
|
temperature: config.temperature
|
|
});
|
|
|
|
const text = result.text.trim();
|
|
if (!text) {
|
|
if (workload === 'extraction') {
|
|
return {
|
|
provider: 'local-fallback',
|
|
model: config.model,
|
|
text: fallbackResponse(prompt)
|
|
};
|
|
}
|
|
|
|
throw new Error('AI SDK returned an empty response');
|
|
}
|
|
|
|
return {
|
|
provider: config.provider,
|
|
model: config.model,
|
|
text
|
|
};
|
|
} catch (error) {
|
|
if (workload === 'extraction') {
|
|
warn(`[AI SDK] Extraction fallback activated: ${asErrorMessage(error)}`);
|
|
|
|
return {
|
|
provider: 'local-fallback',
|
|
model: config.model,
|
|
text: fallbackResponse(prompt)
|
|
};
|
|
}
|
|
|
|
throw error;
|
|
}
|
|
}
|
|
|
|
/**
 * Test-only helper: clears the one-shot guard so the ZHIPU_BASE_URL
 * warning can fire again in the next test.
 */
export function __resetAiWarningsForTests() {
  warnedIgnoredZhipuBaseUrl = false;
}
|