Commit 9160226

Merge remote-tracking branch 'upstream/master'
awwaawwa committed Jul 23, 2024
2 parents 8515c87 + 60b3491 commit 9160226
Showing 2 changed files with 25 additions and 15 deletions.
config.py: 2 changes (1 addition & 1 deletion)
@@ -33,7 +33,7 @@
# [step 3]>> Model selection (note: LLM_MODEL is the model selected by default; it *must* be included in the AVAIL_LLM_MODELS list)
LLM_MODEL = "gpt-3.5-turbo-16k" # options ↓↓↓
AVAIL_LLM_MODELS = ["gpt-4-1106-preview", "gpt-4-turbo-preview", "gpt-4-vision-preview", "gpt-4-turbo", "gpt-4-turbo-2024-04-09",
"gpt-4o", "gpt-4o-2024-05-13",
"gpt-4o", "gpt-4o-2024-05-13", "gpt-4o-mini",
"gpt-3.5-turbo-1106", "gpt-3.5-turbo-16k", "gpt-3.5-turbo", "azure-gpt-3.5",
"gpt-4", "gpt-4-32k", "azure-gpt-4", "glm-4", "glm-4v", "glm-3-turbo",
"gemini-pro", "chatglm3"
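The comment in this hunk states the constraint the new entry must satisfy: LLM_MODEL has to be one of the names in AVAIL_LLM_MODELS. Below is a minimal sanity-check sketch of that rule, assuming config.py is importable from the project root; the check itself is not part of this commit.

    # Illustrative check, not part of this commit: verify the config.py change above.
    import config

    # The default model must be listed among the available models, as the comment requires.
    assert config.LLM_MODEL in config.AVAIL_LLM_MODELS, \
        f"LLM_MODEL={config.LLM_MODEL!r} is missing from AVAIL_LLM_MODELS"

    # This merge adds "gpt-4o-mini" to the selectable models.
    assert "gpt-4o-mini" in config.AVAIL_LLM_MODELS
    print(f"{len(config.AVAIL_LLM_MODELS)} models available; default: {config.LLM_MODEL}")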
request_llms/bridge_all.py: 38 changes (24 additions & 14 deletions)
@@ -247,7 +247,7 @@ def decode(self, *args, **kwargs):
"token_cnt": get_token_num_gpt35,
},

"gpt-3.5-turbo-1106": { #16k
"gpt-3.5-turbo-1106": { # 16k
"fn_with_ui": chatgpt_ui,
"fn_without_ui": chatgpt_noui,
"endpoint": openai_endpoint,
@@ -256,7 +256,7 @@ def decode(self, *args, **kwargs):
"token_cnt": get_token_num_gpt35,
},

"gpt-3.5-turbo-0125": { #16k
"gpt-3.5-turbo-0125": { # 16k
"fn_with_ui": chatgpt_ui,
"fn_without_ui": chatgpt_noui,
"endpoint": openai_endpoint,
@@ -283,7 +283,7 @@
"token_cnt": get_token_num_gpt4,
},

"gpt-4-turbo-preview": {
"gpt-4o": {
"fn_with_ui": chatgpt_ui,
"fn_without_ui": chatgpt_noui,
"endpoint": openai_endpoint,
@@ -293,7 +293,17 @@
"token_cnt": get_token_num_gpt4,
},

"gpt-4-1106-preview": {
"gpt-4o-mini": {
"fn_with_ui": chatgpt_ui,
"fn_without_ui": chatgpt_noui,
"endpoint": openai_endpoint,
"has_multimodal_capacity": True,
"max_token": 128000,
"tokenizer": tokenizer_gpt4,
"token_cnt": get_token_num_gpt4,
},

"gpt-4o-2024-05-13": {
"fn_with_ui": chatgpt_ui,
"fn_without_ui": chatgpt_noui,
"has_multimodal_capacity": True,
@@ -303,7 +313,7 @@
"token_cnt": get_token_num_gpt4,
},

"gpt-4-0125-preview": {
"gpt-4-turbo-preview": {
"fn_with_ui": chatgpt_ui,
"fn_without_ui": chatgpt_noui,
"endpoint": openai_endpoint,
@@ -312,7 +322,7 @@
"token_cnt": get_token_num_gpt4,
},

"gpt-4-turbo": {
"gpt-4-1106-preview": {
"fn_with_ui": chatgpt_ui,
"fn_without_ui": chatgpt_noui,
"endpoint": openai_endpoint,
@@ -321,7 +331,7 @@
"token_cnt": get_token_num_gpt4,
},

"gpt-4-turbo-2024-04-09": {
"gpt-4-0125-preview": {
"fn_with_ui": chatgpt_ui,
"fn_without_ui": chatgpt_noui,
"endpoint": openai_endpoint,
@@ -330,24 +340,24 @@
"token_cnt": get_token_num_gpt4,
},

"gpt-4o": {
"gpt-4-turbo": {
"fn_with_ui": chatgpt_ui,
"fn_without_ui": chatgpt_noui,
"has_multimodal_capacity": True,
"endpoint": openai_endpoint,
"max_token": 128000,
"tokenizer": tokenizer_gpt4o,
"token_cnt": get_token_num_gpt4o,
"tokenizer": tokenizer_gpt4,
"token_cnt": get_token_num_gpt4,
},
"gpt-4o-2024-05-13": {

"gpt-4-turbo-2024-04-09": {
"fn_with_ui": chatgpt_ui,
"fn_without_ui": chatgpt_noui,
"has_multimodal_capacity": True,
"endpoint": openai_endpoint,
"max_token": 128000,
"tokenizer": tokenizer_gpt4o,
"token_cnt": get_token_num_gpt4o,
"tokenizer": tokenizer_gpt4,
"token_cnt": get_token_num_gpt4,
},

"gpt-3.5-random": {
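The request_llms/bridge_all.py entries touched above all share the same record shape (endpoint, max_token, tokenizer, token_cnt, plus UI and non-UI callbacks). Below is a hedged sketch of how the newly added "gpt-4o-mini" record could be inspected, assuming these entries live in a module-level model_info dict (the dict's name is not visible in this diff) and that the project environment allows the import; check_prompt_fits is an illustrative helper, not a function from this repository.

    # Illustrative only: inspect the "gpt-4o-mini" entry added above. The model_info
    # name and the check_prompt_fits helper are assumptions, not part of this commit.
    from request_llms.bridge_all import model_info

    def check_prompt_fits(model: str, prompt: str) -> bool:
        """Return True if `prompt` fits within the model's advertised context window."""
        entry = model_info[model]             # KeyError for names that were never registered
        used = entry["token_cnt"](prompt)     # per-model token counter stored in the entry
        return used <= entry["max_token"]

    entry = model_info["gpt-4o-mini"]
    print(entry["max_token"])                      # 128000, per the new entry
    print(entry.get("has_multimodal_capacity"))    # True, per the new entry
    print(check_prompt_fits("gpt-4o-mini", "hello"))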
