From 3e4cb1e895ce8147eb9c1b2a9dda1b58c5e112a5 Mon Sep 17 00:00:00 2001 From: yodamaster726 Date: Fri, 22 Nov 2024 19:36:46 -0500 Subject: [PATCH 1/2] fix: ollamaModel already defined --- packages/core/src/defaultCharacter.ts | 2 +- 1 file changed, 1 insertion(+), 1 deletion(-) diff --git a/packages/core/src/defaultCharacter.ts b/packages/core/src/defaultCharacter.ts index 2ca1bf0179c..1a1b4e75834 100644 --- a/packages/core/src/defaultCharacter.ts +++ b/packages/core/src/defaultCharacter.ts @@ -5,7 +5,7 @@ export const defaultCharacter: Character = { username: "eliza", plugins: [], clients: [], - modelProvider: ModelProviderName.OPENAI, + modelProvider: ModelProviderName.OLLAMA, settings: { secrets: {}, voice: { From c6afcd99cf6beec63b50939387adda653c85cef4 Mon Sep 17 00:00:00 2001 From: yodamaster726 Date: Fri, 22 Nov 2024 19:38:47 -0500 Subject: [PATCH 2/2] fix: ollamaModel already defined fix: ollamaModel already defined --- packages/plugin-node/src/services/llama.ts | 5 +++-- 1 file changed, 3 insertions(+), 2 deletions(-) diff --git a/packages/plugin-node/src/services/llama.ts b/packages/plugin-node/src/services/llama.ts index d4982e6bc6d..b930fcfbe89 100644 --- a/packages/plugin-node/src/services/llama.ts +++ b/packages/plugin-node/src/services/llama.ts @@ -164,6 +164,7 @@ export class LlamaService extends Service { private ctx: LlamaContext | undefined; private sequence: LlamaContextSequence | undefined; private modelUrl: string; + private ollamaModel: string | undefined; private messageQueue: QueuedMessage[] = []; private isProcessing: boolean = false; @@ -179,6 +180,7 @@ export class LlamaService extends Service { "https://huggingface.co/NousResearch/Hermes-3-Llama-3.1-8B-GGUF/resolve/main/Hermes-3-Llama-3.1-8B.Q8_0.gguf?download=true"; const modelName = "model.gguf"; this.modelPath = path.join(__dirname, modelName); + this.ollamaModel = process.env.OLLAMA_MODEL; } async initialize(runtime: IAgentRuntime): Promise<void> {} @@ -486,13 +488,12 @@ export class 
LlamaService extends Service { throw new Error("Model not initialized. Call initialize() first."); } - const ollamaModel = process.env.OLLAMA_MODEL; const ollamaUrl = process.env.OLLAMA_SERVER_URL || "http://localhost:11434"; const embeddingModel = process.env.OLLAMA_EMBEDDING_MODEL || "mxbai-embed-large"; elizaLogger.info( - `Using Ollama API for embeddings with model ${embeddingModel} (base: ${ollamaModel})` + `Using Ollama API for embeddings with model ${embeddingModel} (base: ${this.ollamaModel})` ); const response = await fetch(`${ollamaUrl}/api/embeddings`, {