diff --git a/.env.example b/.env.example
index 1360105b99..cfe2a5bcdc 100644
--- a/.env.example
+++ b/.env.example
@@ -62,6 +62,9 @@ SMALL_OLLAMA_MODEL=    # Default: llama3.2
 MEDIUM_OLLAMA_MODEL=   # Default: hermes3
 LARGE_OLLAMA_MODEL=    # Default: hermes3:70b
 
+# LlamaLocal Configuration
+LLAMALOCAL_PATH=       # Default: "" (plugin-node/dist/, which is destroyed and recreated on every build)
+
 # API Keys
 ANTHROPIC_API_KEY=     # For Claude
 HEURIST_API_KEY=       # Get from https://heurist.ai/dev-access
diff --git a/packages/plugin-node/src/services/llama.ts b/packages/plugin-node/src/services/llama.ts
index f158c1fefe..8a6c8a2f16 100644
--- a/packages/plugin-node/src/services/llama.ts
+++ b/packages/plugin-node/src/services/llama.ts
@@ -185,7 +185,7 @@ export class LlamaService extends Service {
         this.modelUrl =
             "https://huggingface.co/NousResearch/Hermes-3-Llama-3.1-8B-GGUF/resolve/main/Hermes-3-Llama-3.1-8B.Q8_0.gguf?download=true";
         const modelName = "model.gguf";
-        this.modelPath = path.join(__dirname, modelName);
+        this.modelPath = path.join(process.env.LLAMALOCAL_PATH ?? "", modelName);
         this.ollamaModel = process.env.OLLAMA_MODEL;
     }
 
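For reviewers, a minimal sketch of how the new lookup behaves for the different ways `LLAMALOCAL_PATH` can be set. The `resolveModelPath` helper and the `/var/models/llama` path are illustrative only, not part of the patch; the sketch assumes the `?? ""` fallback so that an unset variable does not make `path.join` throw.

```ts
import path from "path";

// Hypothetical helper mirroring the constructor logic in llama.ts.
function resolveModelPath(llamalocalPath: string | undefined): string {
    const modelName = "model.gguf";
    // Falling back to "" keeps the documented default: an unset or empty
    // LLAMALOCAL_PATH yields a bare relative path instead of a TypeError.
    return path.join(llamalocalPath ?? "", modelName);
}

resolveModelPath(undefined);            // "model.gguf"   (variable not set)
resolveModelPath("");                   // "model.gguf"   (explicit empty default)
resolveModelPath("/var/models/llama");  // "/var/models/llama/model.gguf"
```

Pointing `LLAMALOCAL_PATH` at a directory outside `plugin-node/dist/` is what lets the downloaded model survive a rebuild, since the default location is wiped on every build.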