From 4824905592d608e133eb9e4962c803ace0a369b0 Mon Sep 17 00:00:00 2001
From: Yao Xiao
Date: Tue, 7 Nov 2023 09:48:01 +0800
Subject: [PATCH 1/2] Add new API support

---
 config.py                 |  2 +-
 request_llm/bridge_all.py | 18 ++++++++++++++++++
 2 files changed, 19 insertions(+), 1 deletion(-)

diff --git a/config.py b/config.py
index b4f00a6..903ee20 100644
--- a/config.py
+++ b/config.py
@@ -87,7 +87,7 @@ DEFAULT_FN_GROUPS = ['对话', '编程', '学术', '智能体']
 
 # 模型选择是 (注意: LLM_MODEL是默认选中的模型, 它*必须*被包含在AVAIL_LLM_MODELS列表中 )
 LLM_MODEL = "gpt-3.5-turbo" # 可选 ↓↓↓
-AVAIL_LLM_MODELS = ["gpt-3.5-turbo-16k", "gpt-3.5-turbo", "azure-gpt-3.5",
+AVAIL_LLM_MODELS = ["gpt-3.5-turbo-1106","gpt-4-1106-preview","gpt-3.5-turbo-16k", "gpt-3.5-turbo", "azure-gpt-3.5",
                     "api2d-gpt-3.5-turbo", 'api2d-gpt-3.5-turbo-16k',
                     "gpt-4", "gpt-4-32k", "azure-gpt-4", "api2d-gpt-4",
                     "chatglm", "moss", "newbing", "claude-2"]
diff --git a/request_llm/bridge_all.py b/request_llm/bridge_all.py
index f85d1b6..3d6e4bd 100644
--- a/request_llm/bridge_all.py
+++ b/request_llm/bridge_all.py
@@ -117,6 +117,15 @@ model_info = {
         "token_cnt": get_token_num_gpt35,
     },
 
+    "gpt-3.5-turbo-1106": {#16k
+        "fn_with_ui": chatgpt_ui,
+        "fn_without_ui": chatgpt_noui,
+        "endpoint": openai_endpoint,
+        "max_token": 16385,
+        "tokenizer": tokenizer_gpt35,
+        "token_cnt": get_token_num_gpt35,
+    },
+
     "gpt-4": {
         "fn_with_ui": chatgpt_ui,
         "fn_without_ui": chatgpt_noui,
@@ -135,6 +144,15 @@ model_info = {
         "token_cnt": get_token_num_gpt4,
     },
 
+    "gpt-4-1106-preview": {
+            "fn_with_ui": chatgpt_ui,
+            "fn_without_ui": chatgpt_noui,
+            "endpoint": openai_endpoint,
+            "max_token": 128000,
+            "tokenizer": tokenizer_gpt4,
+            "token_cnt": get_token_num_gpt4,
+    },
+
     "gpt-3.5-random": {
         "fn_with_ui": chatgpt_ui,
         "fn_without_ui": chatgpt_noui,

From 0ff750b60a645c739ab09173a86d0ab9b1482483 Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Fri, 10 Nov 2023 12:40:25 +0800
Subject: [PATCH 2/2] Fix indentation
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 config.py                  |  3 ++-
 request_llms/bridge_all.py | 12 ++++++------
 2 files changed, 8 insertions(+), 7 deletions(-)

diff --git a/config.py b/config.py
index f44c47d..38d0519 100644
--- a/config.py
+++ b/config.py
@@ -87,7 +87,8 @@ DEFAULT_FN_GROUPS = ['对话', '编程', '学术', '智能体']
 
 # 模型选择是 (注意: LLM_MODEL是默认选中的模型, 它*必须*被包含在AVAIL_LLM_MODELS列表中 )
 LLM_MODEL = "gpt-3.5-turbo" # 可选 ↓↓↓
-AVAIL_LLM_MODELS = ["gpt-3.5-turbo-1106","gpt-4-1106-preview","gpt-3.5-turbo-16k", "gpt-3.5-turbo", "azure-gpt-3.5",
+AVAIL_LLM_MODELS = ["gpt-3.5-turbo-1106","gpt-4-1106-preview",
+                    "gpt-3.5-turbo-16k", "gpt-3.5-turbo", "azure-gpt-3.5",
                     "api2d-gpt-3.5-turbo", 'api2d-gpt-3.5-turbo-16k',
                     "gpt-4", "gpt-4-32k", "azure-gpt-4", "api2d-gpt-4",
                     "chatglm3", "moss", "newbing", "claude-2"]
diff --git a/request_llms/bridge_all.py b/request_llms/bridge_all.py
index 6d34d95..3a93234 100644
--- a/request_llms/bridge_all.py
+++ b/request_llms/bridge_all.py
@@ -145,12 +145,12 @@ model_info = {
     },
 
     "gpt-4-1106-preview": {
-            "fn_with_ui": chatgpt_ui,
-            "fn_without_ui": chatgpt_noui,
-            "endpoint": openai_endpoint,
-            "max_token": 128000,
-            "tokenizer": tokenizer_gpt4,
-            "token_cnt": get_token_num_gpt4,
+        "fn_with_ui": chatgpt_ui,
+        "fn_without_ui": chatgpt_noui,
+        "endpoint": openai_endpoint,
+        "max_token": 128000,
+        "tokenizer": tokenizer_gpt4,
+        "token_cnt": get_token_num_gpt4,
     },
 
     "gpt-3.5-random": {
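
Note: both commits follow the existing registration pattern in bridge_all.py, where each model name maps to a UI handler, a no-UI handler, an endpoint, a max_token budget, and a tokenizer, and the same name must also appear in AVAIL_LLM_MODELS in config.py to be selectable. Below is a minimal standalone sketch of a consistency check for additions like these; it inlines the names and token limits from the patch instead of importing config.py or request_llms/bridge_all.py (which pull in the whole project), and the check_new_models helper is illustrative, not part of the repository.

# Minimal sketch (not part of the repo): mirrors the structures touched by the
# patch to confirm that each newly added model name is registered consistently.

# Only the fields relevant to the check are reproduced; the handler and
# tokenizer entries from bridge_all.py are omitted here.
model_info = {
    "gpt-3.5-turbo-1106": {"max_token": 16385},
    "gpt-4-1106-preview": {"max_token": 128000},
}

# Abbreviated stand-in for AVAIL_LLM_MODELS in config.py.
AVAIL_LLM_MODELS = ["gpt-3.5-turbo-1106", "gpt-4-1106-preview",
                    "gpt-3.5-turbo-16k", "gpt-3.5-turbo"]

def check_new_models(names):
    """Illustrative helper: fail loudly if a new model is not fully registered."""
    for name in names:
        assert name in AVAIL_LLM_MODELS, f"{name} is not selectable via AVAIL_LLM_MODELS"
        assert name in model_info, f"{name} has no model_info entry"
        assert model_info[name]["max_token"] > 0, f"{name} has no usable token budget"
    print("new model registrations look consistent")

check_new_models(["gpt-3.5-turbo-1106", "gpt-4-1106-preview"])

The max_token values used above (16385 and 128000) are taken directly from the patch and match OpenAI's published context windows for these two snapshot models.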