
Commit b2737d0

Tested Ollama models (at least the ones my MBP can run).

Had to update ollama and langchain-ollama, but several additional local models now work with structured data.

scosman committed Jan 29, 2025 · 1 parent c972e9a

Showing 3 changed files with 17 additions and 26 deletions.
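For context, the capability these upgrades unlock: recent Ollama builds accept a full JSON schema through the chat API's format parameter and constrain decoding to it, which is what lets additional local models return structured data. A minimal sketch using the ollama Python client; the model choice and schema here are illustrative, not taken from this commit:

# Minimal structured-output sketch, assuming an Ollama server (0.5+) with
# llama3.1:8b pulled and a recent ollama Python client (0.4+).
from ollama import chat
from pydantic import BaseModel

class Joke(BaseModel):
    setup: str
    punchline: str

response = chat(
    model="llama3.1:8b",
    messages=[{"role": "user", "content": "Tell me a joke about compilers."}],
    format=Joke.model_json_schema(),  # constrain decoding to this JSON schema
)
print(Joke.model_validate_json(response.message.content))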
20 changes: 5 additions & 15 deletions libs/core/kiln_ai/adapters/ml_model_list.py
@@ -309,8 +309,7 @@ class KilnModel(BaseModel):
         ),
         KilnModelProvider(
             name=ModelProviderName.ollama,
-            # TODO: test this
-            supports_data_gen=False,
+            structured_output_mode=StructuredOutputMode.json_schema,
             provider_options={
                 "model": "llama3.1:8b",
                 "model_aliases": ["llama3.1"],  # 8b is default
@@ -357,7 +356,6 @@ class KilnModel(BaseModel):
         ),
         KilnModelProvider(
             name=ModelProviderName.ollama,
-            # TODO: test this
             structured_output_mode=StructuredOutputMode.json_schema,
             provider_options={"model": "llama3.1:70b"},
         ),
@@ -506,10 +504,7 @@ class KilnModel(BaseModel):
         ),
         KilnModelProvider(
             name=ModelProviderName.ollama,
-            # TODO: test this
             structured_output_mode=StructuredOutputMode.json_schema,
-            supports_structured_output=False,
-            supports_data_gen=False,
             provider_options={"model": "llama3.2-vision"},
         ),
         KilnModelProvider(
@@ -535,7 +530,6 @@ class KilnModel(BaseModel):
         ),
         KilnModelProvider(
             name=ModelProviderName.ollama,
-            # TODO: test this
             structured_output_mode=StructuredOutputMode.json_schema,
             provider_options={"model": "llama3.2-vision:90b"},
         ),
@@ -571,7 +565,6 @@ class KilnModel(BaseModel):
         ),
         KilnModelProvider(
             name=ModelProviderName.ollama,
-            # TODO: test this
             structured_output_mode=StructuredOutputMode.json_schema,
             provider_options={"model": "llama3.3"},
         ),
@@ -596,7 +589,7 @@ class KilnModel(BaseModel):
     providers=[
         KilnModelProvider(
             name=ModelProviderName.ollama,
-            # TODO: test this
+            structured_output_mode=StructuredOutputMode.json_schema,
             supports_structured_output=False,
             supports_data_gen=False,
             provider_options={"model": "phi3.5"},
@@ -643,14 +636,15 @@ class KilnModel(BaseModel):
     providers=[
         KilnModelProvider(
             name=ModelProviderName.ollama,
-            # TODO: test this
+            structured_output_mode=StructuredOutputMode.json_schema,
             supports_data_gen=False,
             provider_options={
                 "model": "gemma2:9b",
             },
         ),
         KilnModelProvider(
             name=ModelProviderName.openrouter,
+            structured_output_mode=StructuredOutputMode.json_schema,
             supports_data_gen=False,
             provider_options={"model": "google/gemma-2-9b-it"},
         ),
@@ -691,9 +685,7 @@ class KilnModel(BaseModel):
         ),
         KilnModelProvider(
             name=ModelProviderName.ollama,
-            # TODO: test this
-            supports_structured_output=False,
-            supports_data_gen=False,
+            structured_output_mode=StructuredOutputMode.json_schema,
             provider_options={"model": "mixtral"},
         ),
     ],
@@ -712,7 +704,6 @@ class KilnModel(BaseModel):
         KilnModelProvider(
             name=ModelProviderName.ollama,
             provider_options={"model": "qwen2.5"},
-            # TODO: test this
             structured_output_mode=StructuredOutputMode.json_schema,
         ),
     ],
@@ -734,7 +725,6 @@ class KilnModel(BaseModel):
         KilnModelProvider(
             name=ModelProviderName.ollama,
             provider_options={"model": "qwen2.5:72b"},
-            # TODO: test this
             structured_output_mode=StructuredOutputMode.json_schema,
         ),
         KilnModelProvider(
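The pattern above repeats across provider entries: drop the "# TODO: test this" marker and declare json_schema structured output once the model has actually been exercised. A rough smoke test in that spirit (a hypothetical script, not part of the Kiln repo) could walk the models touched in this diff and report which ones return schema-valid JSON:

# Hypothetical smoke test (not from the Kiln codebase): ask each locally
# pulled model for schema-constrained JSON and validate the reply.
from ollama import chat
from pydantic import BaseModel

class Capital(BaseModel):
    country: str
    capital: str

MODELS = ["llama3.1:8b", "llama3.2-vision", "phi3.5", "gemma2:9b", "mixtral", "qwen2.5"]

for name in MODELS:
    try:
        resp = chat(
            model=name,
            messages=[{"role": "user", "content": "What is the capital of France?"}],
            format=Capital.model_json_schema(),
        )
        Capital.model_validate_json(resp.message.content)
        print(f"{name}: structured output ok")
    except Exception as exc:  # model not pulled, server down, or invalid JSON
        print(f"{name}: failed ({exc})")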
2 changes: 1 addition & 1 deletion libs/core/pyproject.toml
@@ -22,7 +22,7 @@ dependencies = [
     "langchain-aws>=0.2.4",
     "langchain-fireworks>=0.2.5",
     "langchain-groq>=0.2.0",
-    "langchain-ollama>=0.2.0",
+    "langchain-ollama>=0.2.2",
     "langchain-openai>=0.2.4",
     "langchain>=0.3.5",
     "openai>=1.53.0",
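The bump to langchain-ollama 0.2.2 matters because, as I understand it, that is the first release where ChatOllama's format parameter accepts a JSON schema dict rather than only the literal "json". A hedged sketch of that usage (model name illustrative):

# Hedged sketch, assuming langchain-ollama >= 0.2.2, where ChatOllama's
# `format` accepts a JSON schema dict for schema-constrained decoding.
from langchain_ollama import ChatOllama
from pydantic import BaseModel

class Answer(BaseModel):
    value: int

llm = ChatOllama(model="qwen2.5", format=Answer.model_json_schema())
msg = llm.invoke("What is 6 * 7? Reply as JSON.")
print(Answer.model_validate_json(msg.content))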
21 changes: 11 additions & 10 deletions uv.lock

Some generated files are not rendered by default.
