From aff4d543f339447dc72724762568c5121028e1d0 Mon Sep 17 00:00:00 2001
From: qevan
Date: Tue, 28 Jan 2025 10:24:34 -0800
Subject: [PATCH] add LLM env (#48)

---
 .env.template     | 6 ++++--
 src/SentientAI.ts | 7 +++++--
 2 files changed, 9 insertions(+), 4 deletions(-)

diff --git a/.env.template b/.env.template
index 9ff0a57..8aa0904 100644
--- a/.env.template
+++ b/.env.template
@@ -6,7 +6,9 @@ DEEPSEEK_API_KEY=your_deepseek_api_key
 ANTHROPIC_API_KEY=your_anthropic_api_key
 OPENAI_API_KEY=your_openai_api_key
 
-FAST_LLM_MODEL=gpt-4o-mini
-LLM_MODEL=deepseek-chat
+FAST_LLM_PROVIDER=
+FAST_LLM_MODEL=
+LLM_PROVIDER=
+LLM_MODEL=
 
 API_V2_KEY=your_api_v2_key # for iotex l1 data
\ No newline at end of file
diff --git a/src/SentientAI.ts b/src/SentientAI.ts
index 6b34f2a..718a3c3 100644
--- a/src/SentientAI.ts
+++ b/src/SentientAI.ts
@@ -9,6 +9,9 @@ export class SentientAI {
   orchestrator: QueryOrchestrator;
 
   constructor() {
+    if (!process.env.FAST_LLM_PROVIDER || !process.env.LLM_PROVIDER) {
+      throw new Error("FAST_LLM_PROVIDER and LLM_PROVIDER must be set");
+    }
     this.orchestrator = new QueryOrchestrator({
       tools: [
         new NewsAPITool(),
@@ -19,8 +22,8 @@
         new L1DataTool(),
       ],
       llmService: new LLMService({
-        fastLLMProvider: "openai",
-        llmProvider: "deepseek",
+        fastLLMProvider: process.env.FAST_LLM_PROVIDER,
+        llmProvider: process.env.LLM_PROVIDER,
       }),
     });
   }
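
A minimal sketch of how the new env-driven configuration might be used. The provider values "openai" and "deepseek" mirror the values previously hardcoded in SentientAI.ts, and the model names come from the old .env.template; the import path and the startup snippet itself are illustrative assumptions, not part of the patch.

    // Example .env values, assuming the previously hardcoded providers and
    // the model names that were in the old .env.template:
    //
    //   FAST_LLM_PROVIDER=openai
    //   FAST_LLM_MODEL=gpt-4o-mini
    //   LLM_PROVIDER=deepseek
    //   LLM_MODEL=deepseek-chat

    import { SentientAI } from "./SentientAI"; // import path assumed

    // With this patch the constructor throws if FAST_LLM_PROVIDER or
    // LLM_PROVIDER is unset, so a misconfigured environment fails fast at
    // startup instead of at the first LLM call.
    const sentientAI = new SentientAI();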