From 16bfb958f804e81b88ca3568ebee5c1a90cd4134 Mon Sep 17 00:00:00 2001
From: Nate Sesti
Date: Mon, 29 Apr 2024 16:02:01 -0700
Subject: [PATCH] =?UTF-8?q?=F0=9F=93=9D=20explain=20Ollama=20remote=20host?=
 =?UTF-8?q?ing=20in=20docs?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 docs/docs/reference/Model Providers/ollama.md | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/docs/docs/reference/Model Providers/ollama.md b/docs/docs/reference/Model Providers/ollama.md
index f2018e68b0..664f77a8ff 100644
--- a/docs/docs/reference/Model Providers/ollama.md
+++ b/docs/docs/reference/Model Providers/ollama.md
@@ -9,12 +9,15 @@
       "title": "Ollama",
       "provider": "ollama",
       "model": "llama2-7b",
-      "completionOptions": {}
+      "completionOptions": {},
+      "apiBase": "http://localhost:11434"
     }
   ]
 }
 ```
 
+If you'd like to host Ollama on another machine, you can set it up as described in the [Ollama FAQ](https://github.com/ollama/ollama/blob/main/docs/faq.md#how-can-i-expose-ollama-on-my-network), and then set `"apiBase"` to match the IP address / port of that machine.
+
 ## Completion Options
 
 In addition to the model type, you can also configure some of the parameters that Ollama uses to run the model.
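
As a sketch of the remote setup described in the added paragraph, a `config.json` entry pointing at another machine might look like the following; the title "Ollama (remote)" and the address `192.168.0.42` are placeholders, to be replaced with the IP address / port of the host actually running Ollama:

```json
{
  "models": [
    {
      "title": "Ollama (remote)",
      "provider": "ollama",
      "model": "llama2-7b",
      "completionOptions": {},
      "apiBase": "http://192.168.0.42:11434"
    }
  ]
}
```

Note that the remote machine must also be configured to listen on the network rather than only on localhost, e.g. by setting the `OLLAMA_HOST` environment variable as described in the linked Ollama FAQ.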