// Neon-Desk/lib/server/ai.test.ts
// (file-viewer chrome removed: "Files / 281 lines / 8.3 KiB / TypeScript")
import { beforeEach, describe, expect, it, mock } from 'bun:test';
import {
__resetAiWarningsForTests,
getAiConfig,
getExtractionAiConfig,
runAiAnalysis
} from './ai';
// The hardcoded Z.AI coding endpoint that getAiConfig() is expected to return,
// even when ZHIPU_BASE_URL is set in the environment (see the tests below).
const CODING_API_BASE_URL = 'https://api.z.ai/api/coding/paas/v4';
describe('ai config and runtime', () => {
// Reset the module-level "warned once" state so each test observes fresh warnings.
beforeEach(() => __resetAiWarningsForTests());
it('uses coding endpoint defaults when optional env values are missing', () => {
  // Only the API key is supplied; every other value must come from defaults.
  const cfg = getAiConfig({ env: { ZHIPU_API_KEY: 'key' }, warn: () => {} });

  expect(cfg.apiKey).toBe('key');
  expect(cfg.baseUrl).toBe(CODING_API_BASE_URL);
  expect(cfg.model).toBe('glm-4.7-flashx');
  expect(cfg.temperature).toBe(0.2);
});
it('ignores ZHIPU_BASE_URL and keeps the hardcoded coding endpoint', () => {
  const cfg = getAiConfig({
    env: { ZHIPU_API_KEY: 'key', ZHIPU_BASE_URL: 'https://api.z.ai/api/paas/v4' },
    warn: () => {}
  });

  // The env override is intentionally ignored by the implementation.
  expect(cfg.baseUrl).toBe(CODING_API_BASE_URL);
});
it('clamps temperature into [0, 2]', () => {
  // Helper: resolve a config whose only variable input is AI_TEMPERATURE.
  const configFor = (temperature: string) =>
    getAiConfig({
      env: { ZHIPU_API_KEY: 'key', AI_TEMPERATURE: temperature },
      warn: () => {}
    });

  expect(configFor('-2').temperature).toBe(0); // below range -> clamped to lower bound
  expect(configFor('9').temperature).toBe(2); // above range -> clamped to upper bound
  expect(configFor('not-a-number').temperature).toBe(0.2); // unparsable -> default
});
it('returns fallback output when ZHIPU_API_KEY is missing', async () => {
  const generate = mock(async () => ({ text: 'should-not-be-used' }));

  const result = await runAiAnalysis('Prompt line one\nPrompt line two', 'System prompt', {
    env: {},
    warn: () => {},
    generate
  });

  // Without an API key the local fallback path runs and the generator is never invoked.
  expect(result.provider).toBe('local-fallback');
  expect(result.model).toBe('glm-4.7-flashx');
  expect(result.text).toContain('AI SDK fallback mode is active');
  expect(generate).not.toHaveBeenCalled();
});
it('warns once when ZHIPU_BASE_URL is set because coding endpoint is hardcoded', () => {
  const warn = mock((_message: string) => {});
  const env = {
    ZHIPU_API_KEY: 'new-key',
    ZHIPU_BASE_URL: 'https://api.z.ai/api/paas/v4'
  };

  // Resolving twice with the same env must emit the warning only on the first call.
  getAiConfig({ env, warn });
  getAiConfig({ env, warn });

  expect(warn).toHaveBeenCalledTimes(1);
});
it('uses configured ZHIPU values and injected generator when API key exists', async () => {
  // Model factory: assert the resolved config that runAiAnalysis hands to it.
  const createModel = mock(
    (cfg: { apiKey?: string; model: string; baseUrl: string; temperature: number }) => {
      expect(cfg.apiKey).toBe('new-key');
      expect(cfg.baseUrl).toBe(CODING_API_BASE_URL);
      expect(cfg.model).toBe('glm-4-plus');
      expect(cfg.temperature).toBe(0.4);
      return { modelId: cfg.model };
    }
  );

  // Generator: assert the call shape, then return padded text (trimmed by the caller).
  const generate = mock(
    async (input: {
      model: unknown;
      system?: string;
      prompt: string;
      temperature: number;
      maxRetries?: number;
    }) => {
      expect(input.system).toBe('Use concise style');
      expect(input.prompt).toBe('Analyze this filing');
      expect(input.temperature).toBe(0.4);
      expect(input.maxRetries).toBe(0);
      return { text: ' Generated insight ' };
    }
  );

  const result = await runAiAnalysis('Analyze this filing', 'Use concise style', {
    env: {
      ZHIPU_API_KEY: 'new-key',
      ZHIPU_MODEL: 'glm-4-plus',
      ZHIPU_BASE_URL: 'https://api.z.ai/api/paas/v4',
      AI_TEMPERATURE: '0.4'
    },
    warn: () => {},
    createModel,
    generate
  });

  expect(createModel).toHaveBeenCalledTimes(1);
  expect(generate).toHaveBeenCalledTimes(1);
  expect(result.provider).toBe('zhipu');
  expect(result.model).toBe('glm-4-plus');
  expect(result.text).toBe('Generated insight');
});
it('throws when AI generation returns an empty response', async () => {
  // Whitespace-only output must be treated as empty and rejected.
  const pending = runAiAnalysis('Analyze this filing', undefined, {
    env: { ZHIPU_API_KEY: 'new-key' },
    warn: () => {},
    createModel: () => ({}),
    generate: async () => ({ text: ' ' })
  });

  await expect(pending).rejects.toThrow('AI SDK returned an empty response');
});
it('uses ollama defaults for extraction workload config', () => {
  const cfg = getExtractionAiConfig({ env: {}, warn: () => {} });

  expect(cfg.provider).toBe('ollama');
  expect(cfg.baseUrl).toBe('http://127.0.0.1:11434');
  expect(cfg.model).toBe('qwen3:8b');
  expect(cfg.apiKey).toBe('ollama');
  expect(cfg.temperature).toBe(0);
});
it('uses extraction workload and returns ollama provider on success', async () => {
  // Model factory: assert the extraction-workload config passed through.
  const createModel = mock(
    (cfg: {
      provider: string;
      apiKey?: string;
      model: string;
      baseUrl: string;
      temperature: number;
    }) => {
      expect(cfg.provider).toBe('ollama');
      expect(cfg.baseUrl).toBe('http://127.0.0.1:11434');
      expect(cfg.model).toBe('qwen3:8b');
      expect(cfg.temperature).toBe(0);
      return { modelId: cfg.model };
    }
  );

  const generate = mock(async () => ({
    text: '{"summary":"ok","keyPoints":[],"redFlags":[],"followUpQuestions":[],"portfolioSignals":[],"confidence":0.6}'
  }));

  const result = await runAiAnalysis('Extract this filing', 'Return JSON', {
    env: { OLLAMA_MODEL: 'qwen3:8b' },
    warn: () => {},
    workload: 'extraction',
    createModel,
    generate
  });

  expect(createModel).toHaveBeenCalledTimes(1);
  expect(generate).toHaveBeenCalledTimes(1);
  expect(result.provider).toBe('ollama');
  expect(result.model).toBe('qwen3:8b');
});
it('falls back to local text when extraction workload generation fails', async () => {
  const result = await runAiAnalysis('Extract this filing', 'Return JSON', {
    env: {},
    warn: () => {},
    workload: 'extraction',
    createModel: () => ({}),
    generate: async () => {
      throw new Error('ollama unavailable');
    }
  });

  // Extraction failures never propagate; the local fallback text is returned instead.
  expect(result.provider).toBe('local-fallback');
  expect(result.model).toBe('qwen3:8b');
  expect(result.text).toContain('AI SDK fallback mode is active');
});
it('falls back to local text when report workload fails with insufficient balance', async () => {
  const warn = mock((_message: string) => {});

  // Balance errors in the error message itself trigger the fallback (with one warning).
  const result = await runAiAnalysis('Analyze this filing', 'Use concise style', {
    env: { ZHIPU_API_KEY: 'new-key' },
    warn,
    createModel: () => ({}),
    generate: async () => {
      throw new Error('AI_RetryError: Failed after 3 attempts. Last error: Insufficient balance or no resource package. Please recharge.');
    }
  });

  expect(result.provider).toBe('local-fallback');
  expect(result.model).toBe('glm-4.7-flashx');
  expect(result.text).toContain('AI SDK fallback mode is active');
  expect(warn).toHaveBeenCalledTimes(1);
});
it('falls back to local text when report workload cause contains insufficient balance', async () => {
  const warn = mock((_message: string) => {});

  // Balance errors nested in `cause` must also be detected, not only the top-level message.
  const result = await runAiAnalysis('Analyze this filing', 'Use concise style', {
    env: { ZHIPU_API_KEY: 'new-key' },
    warn,
    createModel: () => ({}),
    generate: async () => {
      const retryError = new Error('AI_RetryError: Failed after 3 attempts.');
      (retryError as Error & { cause?: unknown }).cause = new Error(
        'Last error: Insufficient balance or no resource package. Please recharge.'
      );
      throw retryError;
    }
  });

  expect(result.provider).toBe('local-fallback');
  expect(result.model).toBe('glm-4.7-flashx');
  expect(result.text).toContain('AI SDK fallback mode is active');
  expect(warn).toHaveBeenCalledTimes(1);
});
it('keeps throwing unknown report workload errors', async () => {
  const pending = runAiAnalysis('Analyze this filing', 'Use concise style', {
    env: { ZHIPU_API_KEY: 'new-key' },
    warn: () => {},
    createModel: () => ({}),
    generate: async () => {
      throw new Error('unexpected schema mismatch');
    }
  });

  // Only balance-related failures are swallowed; anything else must surface to the caller.
  await expect(pending).rejects.toThrow('unexpected schema mismatch');
});
});