From b2737d0b38e1c71ae9000ec73848bed60e3c3800 Mon Sep 17 00:00:00 2001 From: scosman Date: Tue, 28 Jan 2025 19:54:24 -0500 Subject: [PATCH] Tested Ollama models (at least the one my MBP can run). Had to update ollama and langchain-ollama, but several additional local models now work with structured data. --- libs/core/kiln_ai/adapters/ml_model_list.py | 20 +++++--------------- libs/core/pyproject.toml | 2 +- uv.lock | 21 +++++++++++---------- 3 files changed, 17 insertions(+), 26 deletions(-) diff --git a/libs/core/kiln_ai/adapters/ml_model_list.py b/libs/core/kiln_ai/adapters/ml_model_list.py index 5adff6f5..94ac14d6 100644 --- a/libs/core/kiln_ai/adapters/ml_model_list.py +++ b/libs/core/kiln_ai/adapters/ml_model_list.py @@ -309,8 +309,7 @@ class KilnModel(BaseModel): ), KilnModelProvider( name=ModelProviderName.ollama, - # TODO: test this - supports_data_gen=False, + structured_output_mode=StructuredOutputMode.json_schema, provider_options={ "model": "llama3.1:8b", "model_aliases": ["llama3.1"], # 8b is default @@ -357,7 +356,6 @@ class KilnModel(BaseModel): ), KilnModelProvider( name=ModelProviderName.ollama, - # TODO: test this structured_output_mode=StructuredOutputMode.json_schema, provider_options={"model": "llama3.1:70b"}, ), @@ -506,10 +504,7 @@ class KilnModel(BaseModel): ), KilnModelProvider( name=ModelProviderName.ollama, - # TODO: test this structured_output_mode=StructuredOutputMode.json_schema, - supports_structured_output=False, - supports_data_gen=False, provider_options={"model": "llama3.2-vision"}, ), KilnModelProvider( @@ -535,7 +530,6 @@ class KilnModel(BaseModel): ), KilnModelProvider( name=ModelProviderName.ollama, - # TODO: test this structured_output_mode=StructuredOutputMode.json_schema, provider_options={"model": "llama3.2-vision:90b"}, ), @@ -571,7 +565,6 @@ class KilnModel(BaseModel): ), KilnModelProvider( name=ModelProviderName.ollama, - # TODO: test this structured_output_mode=StructuredOutputMode.json_schema, 
provider_options={"model": "llama3.3"}, ), @@ -596,7 +589,7 @@ class KilnModel(BaseModel): providers=[ KilnModelProvider( name=ModelProviderName.ollama, - # TODO: test this + structured_output_mode=StructuredOutputMode.json_schema, supports_structured_output=False, supports_data_gen=False, provider_options={"model": "phi3.5"}, @@ -643,7 +636,7 @@ class KilnModel(BaseModel): providers=[ KilnModelProvider( name=ModelProviderName.ollama, - # TODO: test this + structured_output_mode=StructuredOutputMode.json_schema, supports_data_gen=False, provider_options={ "model": "gemma2:9b", @@ -651,6 +644,7 @@ class KilnModel(BaseModel): ), KilnModelProvider( name=ModelProviderName.openrouter, + structured_output_mode=StructuredOutputMode.json_schema, supports_data_gen=False, provider_options={"model": "google/gemma-2-9b-it"}, ), @@ -691,9 +685,7 @@ class KilnModel(BaseModel): ), KilnModelProvider( name=ModelProviderName.ollama, - # TODO: test this - supports_structured_output=False, - supports_data_gen=False, + structured_output_mode=StructuredOutputMode.json_schema, provider_options={"model": "mixtral"}, ), ], @@ -712,7 +704,6 @@ class KilnModel(BaseModel): KilnModelProvider( name=ModelProviderName.ollama, provider_options={"model": "qwen2.5"}, - # TODO: test this structured_output_mode=StructuredOutputMode.json_schema, ), ], @@ -734,7 +725,6 @@ class KilnModel(BaseModel): KilnModelProvider( name=ModelProviderName.ollama, provider_options={"model": "qwen2.5:72b"}, - # TODO: test this structured_output_mode=StructuredOutputMode.json_schema, ), KilnModelProvider( diff --git a/libs/core/pyproject.toml b/libs/core/pyproject.toml index 95a651df..43dd9ff9 100644 --- a/libs/core/pyproject.toml +++ b/libs/core/pyproject.toml @@ -22,7 +22,7 @@ dependencies = [ "langchain-aws>=0.2.4", "langchain-fireworks>=0.2.5", "langchain-groq>=0.2.0", - "langchain-ollama>=0.2.0", + "langchain-ollama>=0.2.2", "langchain-openai>=0.2.4", "langchain>=0.3.5", "openai>=1.53.0", diff --git a/uv.lock 
b/uv.lock index 5b507cd0..1bf056d1 100644 --- a/uv.lock +++ b/uv.lock @@ -794,7 +794,7 @@ requires-dist = [ { name = "langchain-aws", specifier = ">=0.2.4" }, { name = "langchain-fireworks", specifier = ">=0.2.5" }, { name = "langchain-groq", specifier = ">=0.2.0" }, - { name = "langchain-ollama", specifier = ">=0.2.0" }, + { name = "langchain-ollama", specifier = ">=0.2.2" }, { name = "langchain-openai", specifier = ">=0.2.4" }, { name = "openai", specifier = ">=1.53.0" }, { name = "pdoc", specifier = ">=15.0.0" }, @@ -953,7 +953,7 @@ wheels = [ [[package]] name = "langchain-core" -version = "0.3.21" +version = "0.3.32" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "jsonpatch" }, @@ -964,9 +964,9 @@ dependencies = [ { name = "tenacity" }, { name = "typing-extensions" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/64/0c/9388d0959dff69fbca6e0f79076811cb1a494e06d04c8a880079228edded/langchain_core-0.3.21.tar.gz", hash = "sha256:561b52b258ffa50a9fb11d7a1940ebfd915654d1ec95b35e81dfd5ee84143411", size = 328597 } +sdist = { url = "https://files.pythonhosted.org/packages/37/96/5ac1277e4e7bb0e134ae3c91a970556458fa6a54bd9c4a2ac9d13b098697/langchain_core-0.3.32.tar.gz", hash = "sha256:4eb85d8428585e67a1766e29c6aa2f246c6329d97cb486e8d6f564ab0bd94a4f", size = 331235 } wheels = [ - { url = "https://files.pythonhosted.org/packages/9b/50/e0bd90fc481d1cc8c2039ad6161b20fc8e396a7cba6064a4f1e8e5afea62/langchain_core-0.3.21-py3-none-any.whl", hash = "sha256:7e723dff80946a1198976c6876fea8326dc82566ef9bcb5f8d9188f738733665", size = 409467 }, + { url = "https://files.pythonhosted.org/packages/3f/bb/f4a7a98ae965acacd75dcbc85a714589a20e910906691b3ebc03089e6962/langchain_core-0.3.32-py3-none-any.whl", hash = "sha256:c050bd1e6dd556ae49073d338aca9dca08b7b55f4778ddce881a12224bc82a7e", size = 412416 }, ] [[package]] @@ -1000,15 +1000,15 @@ wheels = [ [[package]] name = "langchain-ollama" -version = "0.2.0" +version = "0.2.2" source = { registry = 
"https://pypi.org/simple" } dependencies = [ { name = "langchain-core" }, { name = "ollama" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/81/1b/c05811f5a0bb0c57a4aadd01eaadbfff8cb0d1225a22e4a79d9185b984a2/langchain_ollama-0.2.0.tar.gz", hash = "sha256:250ad9f3edce1a0ca16e4fad19f783ac728d7d76888ba952c462cd9f680353f7", size = 11364 } +sdist = { url = "https://files.pythonhosted.org/packages/88/8e/21f1df6af0983cf1bc6d15c71f21bd17d968114bf6cce0fb40442f5ba81f/langchain_ollama-0.2.2.tar.gz", hash = "sha256:2d9bcb06ffdbe43c7c6906c46e710d36d33b6b99cd4975cbf54060f13e51c875", size = 16970 } wheels = [ - { url = "https://files.pythonhosted.org/packages/ba/15/8192d91f2b27bab26b9fd008a1212b44f4848d07d311777ab6ec0ae7b49a/langchain_ollama-0.2.0-py3-none-any.whl", hash = "sha256:aa5b794599652494a07fd27b22784854480cd4c793f0db5e81ebeccc2affd135", size = 14856 }, + { url = "https://files.pythonhosted.org/packages/7f/77/219fb2290c832e33af2731246ea3328bade50756288c1e97ae73c4ccc197/langchain_ollama-0.2.2-py3-none-any.whl", hash = "sha256:8a1ee72dbb6ea3b3ace1d9dd317e472d667a8ed491328550da59f4893a6796f8", size = 18362 }, ] [[package]] @@ -1238,14 +1238,15 @@ wheels = [ [[package]] name = "ollama" -version = "0.3.3" +version = "0.4.7" source = { registry = "https://pypi.org/simple" } dependencies = [ { name = "httpx" }, + { name = "pydantic" }, ] -sdist = { url = "https://files.pythonhosted.org/packages/a6/8e/60a9b065eb796ef3996451cbe2d8044f6b030696166693b9805ae33b8b4c/ollama-0.3.3.tar.gz", hash = "sha256:f90a6d61803117f40b0e8ff17465cab5e1eb24758a473cfe8101aff38bc13b51", size = 10390 } +sdist = { url = "https://files.pythonhosted.org/packages/b0/6d/dc77539c735bbed5d0c873fb029fb86aa9f0163df169b34152914331c369/ollama-0.4.7.tar.gz", hash = "sha256:891dcbe54f55397d82d289c459de0ea897e103b86a3f1fad0fdb1895922a75ff", size = 12843 } wheels = [ - { url = 
"https://files.pythonhosted.org/packages/6a/ca/d22905ac3f768523f778189d38c9c6cd9edf4fa9dd09cb5a3fc57b184f90/ollama-0.3.3-py3-none-any.whl", hash = "sha256:ca6242ce78ab34758082b7392df3f9f6c2cb1d070a9dede1a4c545c929e16dba", size = 10267 }, + { url = "https://files.pythonhosted.org/packages/31/83/c3ffac86906c10184c88c2e916460806b072a2cfe34cdcaf3a0c0e836d39/ollama-0.4.7-py3-none-any.whl", hash = "sha256:85505663cca67a83707be5fb3aeff0ea72e67846cea5985529d8eca4366564a1", size = 13210 }, ] [[package]]