From 041d79c60dbcbe7f5239a652f2f5a503cc2face2 Mon Sep 17 00:00:00 2001
From: Muspi Merol
Date: Wed, 19 Feb 2025 21:36:28 +0800
Subject: [PATCH] chore: update llm from groq provider

---
 src/utils/llm/__init__.py | 5 +++++
 src/utils/llm/groq.py     | 2 ++
 2 files changed, 7 insertions(+)

diff --git a/src/utils/llm/__init__.py b/src/utils/llm/__init__.py
index 41f10e7..f0c63e2 100644
--- a/src/utils/llm/__init__.py
+++ b/src/utils/llm/__init__.py
@@ -72,6 +72,11 @@
     "claude-3-opus-20240229",
     "claude-3-sonnet-20240229",
     "claude-3-haiku-20240307",
+    "qwen-2.5-32b",
+    "qwen-2.5-coder-32b",
+    "deepseek-r1-distill-qwen-32b",
+    "deepseek-r1-distill-llama-70b",
+    "deepseek-r1-distill-llama-70b-specdec",
     "gemma-7b-it",
     "gemma2-9b-it",
     "llama3-8b-8192",
diff --git a/src/utils/llm/groq.py b/src/utils/llm/groq.py
index dadfd1c..dd6be0b 100644
--- a/src/utils/llm/groq.py
+++ b/src/utils/llm/groq.py
@@ -18,6 +18,8 @@
 @link_llm("llama-3.2")
 @link_llm("llama-3.3-70b-")
 @link_llm("mixtral")
+@link_llm("qwen-2.5")
+@link_llm("deepseek-r1-distill")
 class Groq(AsyncChatOpenAI):
     async def complete(self, prompt: str | list[Message], /, **config):
         config = self._run_config | config
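
The patch adds the new Qwen 2.5 and DeepSeek R1 distill model names to the shared model list and links the "qwen-2.5" and "deepseek-r1-distill" prefixes to the Groq provider class via the link_llm class decorator. The decorator's implementation is not part of this diff; the following is a minimal sketch only, assuming link_llm is a prefix-to-provider registry (the names _prefix_registry and resolve_provider are hypothetical, not taken from the repository):

    # Hypothetical sketch: prefix -> provider-class registry, assuming this is
    # roughly what link_llm does in src/utils/llm.
    from typing import Callable, TypeVar

    T = TypeVar("T")

    _prefix_registry: dict[str, type] = {}  # assumed internal registry

    def link_llm(prefix: str) -> Callable[[type[T]], type[T]]:
        """Register `prefix` as handled by the decorated provider class."""
        def decorator(cls: type[T]) -> type[T]:
            _prefix_registry[prefix] = cls
            return cls
        return decorator

    def resolve_provider(model: str) -> type | None:
        """Return the provider class whose registered prefix matches `model`."""
        for prefix, cls in _prefix_registry.items():
            if model.startswith(prefix):
                return cls
        return None

Under that assumption, resolving a name such as "deepseek-r1-distill-llama-70b" or "qwen-2.5-coder-32b" would route to the Groq class once this patch is applied, which is why only the two prefixes need to be registered rather than each individual model name.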