API2D automatic alignment
commit 107ea868e1
parent da7c03e868
@@ -89,8 +89,8 @@ DEFAULT_FN_GROUPS = ['对话', '编程', '学术', '智能体']
 LLM_MODEL = "gpt-3.5-turbo" # options ↓↓↓
 AVAIL_LLM_MODELS = ["gpt-3.5-turbo-1106","gpt-4-1106-preview",
                     "gpt-3.5-turbo-16k", "gpt-3.5-turbo", "azure-gpt-3.5",
                     "api2d-gpt-3.5-turbo", 'api2d-gpt-3.5-turbo-16k',
                     "gpt-4", "gpt-4-32k", "azure-gpt-4", "api2d-gpt-4",
                     "chatglm3", "moss", "newbing", "claude-2"]
 # P.S. other available models also include ["zhipuai", "qianfan", "llama2", "qwen", "gpt-3.5-turbo-0613", "gpt-3.5-turbo-16k-0613", "gpt-3.5-random"
 #                  "spark", "sparkv2", "sparkv3", "chatglm_onnx", "claude-1-100k", "claude-2", "internlm", "jittorllms_pangualpha", "jittorllms_llama"]
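For reference, any entry in AVAIL_LLM_MODELS carrying the "api2d-" prefix is just the corresponding base model routed through API2D; the hypothetical helper below (not part of the repo, illustration only) spells out that naming convention.

# Hypothetical helper, for illustration only: maps an "api2d-" model name in
# AVAIL_LLM_MODELS back to the base model it is derived from.
def base_model_name(model: str) -> str:
    return model[len("api2d-"):] if model.startswith("api2d-") else model

assert base_model_name("api2d-gpt-3.5-turbo-16k") == "gpt-3.5-turbo-16k"
assert base_model_name("gpt-4") == "gpt-4"  # non-prefixed names pass through unchanged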
@@ -177,11 +177,11 @@ model_info = {
         "fn_without_ui": chatgpt_noui,
         "endpoint": azure_endpoint,
         "max_token": 8192,
-        "tokenizer": tokenizer_gpt35,
-        "token_cnt": get_token_num_gpt35,
+        "tokenizer": tokenizer_gpt4,
+        "token_cnt": get_token_num_gpt4,
     },

-    # api_2d
+    # api_2d (no need to add api2d interfaces here anymore; the code below adds them automatically)
     "api2d-gpt-3.5-turbo": {
         "fn_with_ui": chatgpt_ui,
         "fn_without_ui": chatgpt_noui,
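Each model_info value edited above is a plain dict of handlers and limits. A minimal sketch of that shape follows; the callables and the endpoint string are placeholders, since the real chatgpt_ui / chatgpt_noui handlers and tokenizers come from the bridge modules.

# Illustrative shape of one model_info entry; values are placeholders, not the repo's objects.
example_entry = {
    "fn_with_ui": lambda *args, **kwargs: None,     # streaming handler used by the web UI
    "fn_without_ui": lambda *args, **kwargs: None,  # blocking handler for programmatic calls
    "endpoint": "https://example.invalid/v1/chat/completions",  # azure_endpoint in the hunk above
    "max_token": 8192,
    "tokenizer": None,           # e.g. a tokenizer object such as tokenizer_gpt4 above
    "token_cnt": lambda txt: 0,  # token counter built on that tokenizer (get_token_num_gpt4 above)
}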
@@ -200,15 +200,6 @@ model_info = {
         "token_cnt": get_token_num_gpt4,
     },

-    "api2d-gpt-3.5-turbo-16k": {
-        "fn_with_ui": chatgpt_ui,
-        "fn_without_ui": chatgpt_noui,
-        "endpoint": api2d_endpoint,
-        "max_token": 16385,
-        "tokenizer": tokenizer_gpt35,
-        "token_cnt": get_token_num_gpt35,
-    },
-
     # map chatglm directly onto chatglm2
     "chatglm": {
         "fn_with_ui": chatglm_ui,
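The hand-written "api2d-gpt-3.5-turbo-16k" block removed here becomes redundant: once the alignment loop in the next hunk runs, an equivalent entry is generated from the base "gpt-3.5-turbo-16k" model. A hypothetical sanity check (not in the repo), assuming the base entry carries the same 16385-token limit as the deleted literal:

# Hypothetical check, assuming the base "gpt-3.5-turbo-16k" entry uses the same
# max_token as the deleted literal block above.
generated = model_info.get("api2d-gpt-3.5-turbo-16k", {})
assert generated.get("endpoint") == api2d_endpoint
assert generated.get("max_token") == 16385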
@@ -244,6 +235,13 @@ model_info = {
     },
 }

+# -=-=-=-=-=-=- api2d alignment support -=-=-=-=-=-=-
+for model in AVAIL_LLM_MODELS:
+    if model.startswith('api2d-') and (model.replace('api2d-','') in model_info.keys()):
+        mi = model_info[model.replace('api2d-','')]
+        mi.update({"endpoint": api2d_endpoint})
+        model_info.update({model: mi})
+
 # -=-=-=-=-=-=- the models below are newly added and may carry extra dependencies -=-=-=-=-=-=-
 if "claude-1-100k" in AVAIL_LLM_MODELS or "claude-2" in AVAIL_LLM_MODELS:
     from .bridge_claude import predict_no_ui_long_connection as claude_noui
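The added loop derives one model_info entry per "api2d-" model by reusing the base model's dict and swapping in api2d_endpoint. Note that, as written, mi is a reference to the base entry rather than a copy, so the endpoint update also rewrites the base model's endpoint. The standalone sketch below shows the same idea with an explicit .copy() so the two entries can diverge; the .copy() and the endpoint URLs are my assumptions, not taken from the commit.

# Standalone sketch of the api2d alignment idea; endpoint values and .copy() are illustrative.
api2d_endpoint = "https://openai.api2d.net/v1/chat/completions"  # assumed value

model_info = {
    "gpt-4": {"endpoint": "https://api.openai.com/v1/chat/completions", "max_token": 8192},
}
AVAIL_LLM_MODELS = ["gpt-4", "api2d-gpt-4"]

for model in AVAIL_LLM_MODELS:
    base = model.replace("api2d-", "")
    if model.startswith("api2d-") and base in model_info:
        mi = model_info[base].copy()       # copy so the base entry keeps its own endpoint
        mi.update({"endpoint": api2d_endpoint})
        model_info[model] = mi

assert model_info["api2d-gpt-4"]["endpoint"] == api2d_endpoint
assert model_info["gpt-4"]["endpoint"] != api2d_endpoint  # base entry untouched thanks to the copy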