From 78a8259b82ed39036e7add295b50584010b575ed Mon Sep 17 00:00:00 2001
From: binary-husky <96192199+binary-husky@users.noreply.github.com>
Date: Mon, 14 Aug 2023 10:24:59 +0800
Subject: [PATCH] Update bridge_all.py

---
 request_llm/bridge_all.py | 5 +++--
 1 file changed, 3 insertions(+), 2 deletions(-)

diff --git a/request_llm/bridge_all.py b/request_llm/bridge_all.py
index e7fc830..4c7f19d 100644
--- a/request_llm/bridge_all.py
+++ b/request_llm/bridge_all.py
@@ -369,7 +369,7 @@ if "chatgpt_website" in AVAIL_LLM_MODELS: # Access some reverse-engineered interfaces https://gi
         })
     except:
         print(trimmed_format_exc())
-if "spark" in AVAIL_LLM_MODELS: # Access some reverse-engineered interfaces https://github.com/acheong08/ChatGPT-to-API/
+if "spark" in AVAIL_LLM_MODELS: # iFLYTEK Spark (讯飞星火) cognitive large model
     try:
         from .bridge_spark import predict_no_ui_long_connection as spark_noui
         from .bridge_spark import predict as spark_ui
@@ -427,7 +427,8 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history, sys_prompt, obser
         method = model_info[model]["fn_without_ui"]
         return method(inputs, llm_kwargs, history, sys_prompt, observe_window, console_slience)
     else:
-        # If querying multiple large language models at the same time:
+
+        # If querying multiple large language models at the same time: this is a bit more verbose, but the idea is the same; you do not need to read this else branch
         executor = ThreadPoolExecutor(max_workers=4)
         models = model.split('&')
         n_model = len(models)
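
For context, the else branch touched by the second hunk handles the case where the model string names several backends joined by '&' (for example "gpt-3.5-turbo&spark"): the names are split, each backend is queried on its own worker thread via ThreadPoolExecutor, and the answers are merged. The snippet below is only a minimal sketch of that fan-out pattern under stated assumptions, not the project's actual code; fake_backend and query_multiple_models are hypothetical stand-ins for the registered fn_without_ui callables in model_info.

# Minimal sketch of the multi-model fan-out described in the second hunk.
# Illustrative only: the real bridge_all.py dispatches to each model's
# registered "fn_without_ui" and streams partial results into observe_window.
from concurrent.futures import ThreadPoolExecutor

def fake_backend(model_name, inputs):
    # Hypothetical stand-in for model_info[model_name]["fn_without_ui"](...)
    return f"[{model_name}] reply to: {inputs}"

def query_multiple_models(model, inputs):
    models = model.split('&')  # e.g. "gpt-3.5-turbo&spark" -> two backends
    with ThreadPoolExecutor(max_workers=4) as executor:
        futures = [executor.submit(fake_backend, m, inputs) for m in models]
        # Collect every model's answer and join them into a single report
        return "\n\n---\n\n".join(f.result() for f in futures)

if __name__ == "__main__":
    print(query_multiple_models("gpt-3.5-turbo&spark", "Hello"))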