# """
|
|
# 对各个llm模型进行单元测试
|
|
# """
|
|


def validate_path():
    # Locate the repository root (one level above this file), make it the
    # working directory, and put it on sys.path so `request_llms` imports resolve.
    import os, sys

    root_dir_assume = os.path.abspath(os.path.dirname(__file__) + "/..")
    os.chdir(root_dir_assume)
    sys.path.append(root_dir_assume)


validate_path()  # validate path so you can run from the base directory
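
# Usage sketch (the tests/ path is an assumption; the file only needs to sit
# one directory below the repository root, as validate_path() expects):
#   python tests/test_llms.py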
if "在线模型":
|
|
if __name__ == "__main__":
|
|
from request_llms.bridge_cohere import predict_no_ui_long_connection
|
|
# from request_llms.bridge_spark import predict_no_ui_long_connection
|
|
# from request_llms.bridge_zhipu import predict_no_ui_long_connection
|
|
# from request_llms.bridge_chatglm3 import predict_no_ui_long_connection
|
|
llm_kwargs = {
|
|
"llm_model": "command-r-plus",
|
|
"max_length": 4096,
|
|
"top_p": 1,
|
|
"temperature": 1,
|
|
}
|
|
|
|
result = predict_no_ui_long_connection(
|
|
inputs="请问什么是质子?", llm_kwargs=llm_kwargs, history=["你好", "我好!"], sys_prompt="系统"
|
|
)
|
|
print("final result:", result)
|
|
print("final result:", result)
|
|
|
|
|
|
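
# A minimal sketch of watching the streamed reply while the blocking call runs
# in a worker thread. It assumes the bridge accepts an `observe_window` list
# whose first element accumulates the partial response; the helper name below
# is illustrative, and it must be called manually after the test above has run.
def watch_streaming_demo():
    import threading, time

    observe_window = ["", time.time()]  # [partial text, watchdog timestamp]
    worker = threading.Thread(
        target=predict_no_ui_long_connection,
        kwargs=dict(
            inputs="请问什么是质子?",  # "What is a proton?"
            llm_kwargs=llm_kwargs,
            history=[],
            sys_prompt="",
            observe_window=observe_window,
        ),
    )
    worker.start()
    while worker.is_alive():
        print("partial:", observe_window[0][-80:])  # tail of the partial reply
        time.sleep(2)
    worker.join()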
if "本地模型":
|
|
if __name__ == "__main__":
|
|
# from request_llms.bridge_newbingfree import predict_no_ui_long_connection
|
|
# from request_llms.bridge_moss import predict_no_ui_long_connection
|
|
# from request_llms.bridge_jittorllms_pangualpha import predict_no_ui_long_connection
|
|
# from request_llms.bridge_jittorllms_llama import predict_no_ui_long_connection
|
|
# from request_llms.bridge_claude import predict_no_ui_long_connection
|
|
# from request_llms.bridge_internlm import predict_no_ui_long_connection
|
|
# from request_llms.bridge_deepseekcoder import predict_no_ui_long_connection
|
|
# from request_llms.bridge_qwen_7B import predict_no_ui_long_connection
|
|
# from request_llms.bridge_qwen_local import predict_no_ui_long_connection
|
|
llm_kwargs = {
|
|
"max_length": 4096,
|
|
"top_p": 1,
|
|
"temperature": 1,
|
|
}
|
|
result = predict_no_ui_long_connection(
|
|
inputs="请问什么是质子?", llm_kwargs=llm_kwargs, history=["你好", "我好!"], sys_prompt=""
|
|
)
|
|
print("final result:", result)
|
|
|
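
# A minimal sketch of exercising a model from several threads at once (the
# Cohere bridge was added with `can_multi_thread` support). The helper name
# and the second prompt are illustrative; call it manually to try it.
def multi_thread_demo():
    from concurrent.futures import ThreadPoolExecutor

    prompts = ["请问什么是质子?", "请问什么是中子?"]  # "proton?" / "neutron?"
    with ThreadPoolExecutor(max_workers=2) as pool:
        futures = [
            pool.submit(
                predict_no_ui_long_connection,
                inputs=p,
                llm_kwargs=llm_kwargs,
                history=[],
                sys_prompt="",
            )
            for p in prompts
        ]
        for f in futures:
            print("final result:", f.result())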