From 98dd3ae1c02cc93d7fe75a9d9bab7c49d9807aca Mon Sep 17 00:00:00 2001
From: XIao <46100050+Kilig947@users.noreply.github.com>
Date: Tue, 5 Mar 2024 16:07:05 +0800
Subject: [PATCH] Moonshot - add the available models in config.py (#1603)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* Support the Moonshot (月之暗面) API

* Fix wording

* Improve the no-UI return value; keep uploading conversation-history files to Moonshot

* Fix

* Add the available-model entries to the config

* add `can_multi_thread` model attr (#1598)

---------

Co-authored-by: binary-husky <96192199+binary-husky@users.noreply.github.com>
Co-authored-by: binary-husky
---
 request_llms/bridge_moonshot.py | 16 +++++++++-------
 1 file changed, 9 insertions(+), 7 deletions(-)

diff --git a/request_llms/bridge_moonshot.py b/request_llms/bridge_moonshot.py
index 65f8ce9..7014032 100644
--- a/request_llms/bridge_moonshot.py
+++ b/request_llms/bridge_moonshot.py
@@ -79,6 +79,8 @@ class MoonShotInit:
         if system_prompt:
             messages.append({"role": "system", "content": system_prompt})
         messages.extend(self.__converter_file(inputs))
+        for i in history[0::2]: # 历史文件继续上传
+            messages.extend(self.__converter_file(i))
         messages.extend(self.__conversation_history(history))
         messages.append(self.__converter_user(inputs))
         header = {
@@ -169,21 +171,21 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",
     gpt_bro_init = MoonShotInit()
     watch_dog_patience = 60 # 看门狗的耐心, 设置10秒即可
     stream_response = gpt_bro_init.generate_messages(inputs, llm_kwargs, history, sys_prompt, True)
-    gpt_bro_result = ''
-    for content, gpt_bro_result, error_bro_meg in stream_response:
-        gpt_bro_result += content
+    moonshot_bro_result = ''
+    for content, moonshot_bro_result, error_bro_meg in stream_response:
+        moonshot_bro_result = moonshot_bro_result
         if error_bro_meg:
             if len(observe_window) >= 3:
                 observe_window[2] = error_bro_meg
-            return f'{gpt_bro_result} 对话错误'
+            return f'{moonshot_bro_result} 对话错误'
         # 观测窗
         if len(observe_window) >= 1:
-            observe_window[0] = gpt_bro_result
+            observe_window[0] = moonshot_bro_result
         if len(observe_window) >= 2:
             if (time.time() - observe_window[1]) > watch_dog_patience:
                 observe_window[2] = "请求超时,程序终止。"
-                raise RuntimeError(f"{gpt_bro_result} 程序终止。")
-    return gpt_bro_result
+                raise RuntimeError(f"{moonshot_bro_result} 程序终止。")
+    return moonshot_bro_result
 
 if __name__ == '__main__':
     moon_ai = MoonShotInit()
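
For context on the first hunk: a small sketch of the history layout the new `history[0::2]` loop appears to assume, namely a flat list alternating user and assistant turns, so the even-indexed slice picks out the user-side messages whose earlier file uploads get re-attached. The sample contents and the placeholder file path below are hypothetical and for illustration only.

# Hypothetical history layout assumed by the `history[0::2]` loop in the first hunk:
# a flat, alternating [user, assistant, user, assistant, ...] list, so the even-indexed
# slice selects only the user-side turns whose file references are re-uploaded.
history = [
    "please summarise <uploaded-file-path>",   # user turn 1 (placeholder path, not a real one)
    "Here is the summary ...",                 # assistant turn 1
    "now compare it with last year's report",  # user turn 2
    "Compared with last year ...",             # assistant turn 2
]
user_turns = history[0::2]
assert user_turns == [history[0], history[2]]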
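
The second hunk leans on the `observe_window` list that `predict_no_ui_long_connection` keeps updating while it streams. Below is a minimal caller-side sketch of that convention, assuming index 0 carries the rolling result text, index 1 a keep-alive timestamp, and index 2 the error slot this patch writes to; the `run_with_watchdog` driver, its threading layout, and the keyword-style `observe_window=` call are illustrative assumptions, not code from this repository.

import threading
import time

from request_llms.bridge_moonshot import predict_no_ui_long_connection

def run_with_watchdog(inputs, llm_kwargs, history, sys_prompt):
    # observe_window layout inferred from the hunk above (assumption):
    #   [0] rolling result text, [1] caller keep-alive timestamp, [2] error slot added by this patch
    observe_window = ["", time.time(), ""]
    result_holder = {}

    def worker():
        # raises RuntimeError once observe_window[1] goes stale for longer than watch_dog_patience
        result_holder["text"] = predict_no_ui_long_connection(
            inputs, llm_kwargs, history, sys_prompt, observe_window=observe_window)

    t = threading.Thread(target=worker, daemon=True)
    t.start()
    while t.is_alive():
        observe_window[1] = time.time()   # keep the watchdog fed while the caller still wants the answer
        time.sleep(1)
    if observe_window[2]:
        print("moonshot error:", observe_window[2])
    return result_holder.get("text", "")

The timestamp slot is what lets a caller that has lost interest (for example, the page was closed) simply stop refreshing observe_window[1]; the worker then aborts itself through the RuntimeError path instead of leaving the streaming request running.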