Implement dual-model filing pipeline with Ollama extraction

This commit is contained in:
2026-02-28 16:31:25 -05:00
parent 0615534f4b
commit a09001501e
16 changed files with 872 additions and 51 deletions

View File

@@ -19,6 +19,12 @@ ZHIPU_API_KEY=
ZHIPU_MODEL=glm-4.7-flashx
AI_TEMPERATURE=0.2
# Local extraction model served by Ollama via its OpenAI-compatible API
# When running in Docker against an Ollama on the host, use http://host.docker.internal:11434
OLLAMA_BASE_URL=http://127.0.0.1:11434
OLLAMA_MODEL=qwen3:8b
OLLAMA_API_KEY=ollama
# SEC EDGAR etiquette: requests should carry a descriptive User-Agent with a contact address
SEC_USER_AGENT=Fiscal Clone <support@fiscal.local>