From ce1fc3a99909a503880c800857f4f3dd6c9cf59b Mon Sep 17 00:00:00 2001
From: Your Name
Date: Fri, 21 Apr 2023 19:28:32 +0800
Subject: [PATCH] Fix bug where chatglm does not remember conversation context
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 request_llm/bridge_chatglm.py | 9 ++++++---
 1 file changed, 6 insertions(+), 3 deletions(-)

diff --git a/request_llm/bridge_chatglm.py b/request_llm/bridge_chatglm.py
index 7af2835..8eef322 100644
--- a/request_llm/bridge_chatglm.py
+++ b/request_llm/bridge_chatglm.py
@@ -92,8 +92,8 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",
 
     # chatglm has no sys_prompt interface, so the prompt is merged into history
     history_feedin = []
+    history_feedin.append(["What can I do?", sys_prompt])
     for i in range(len(history)//2):
-        history_feedin.append(["What can I do?", sys_prompt] )
         history_feedin.append([history[2*i], history[2*i+1]] )
 
     watch_dog_patience = 5 # watchdog patience; 5 seconds is enough
@@ -131,10 +131,13 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
         inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
 
     history_feedin = []
+    history_feedin.append(["What can I do?", system_prompt] )
     for i in range(len(history)//2):
-        history_feedin.append(["What can I do?", system_prompt] )
         history_feedin.append([history[2*i], history[2*i+1]] )
 
     for response in glm_handle.stream_chat(query=inputs, history=history_feedin, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
         chatbot[-1] = (inputs, response)
-        yield from update_ui(chatbot=chatbot, history=history)
\ No newline at end of file
+        yield from update_ui(chatbot=chatbot, history=history)
+
+    history.extend([inputs, response])
+    yield from update_ui(chatbot=chatbot, history=history)
\ No newline at end of file
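
The patch does two things: it hoists the synthetic ["What can I do?", sys_prompt] pair out of the pair-building loop, so the system prompt is fed to ChatGLM exactly once instead of once per history pair, and in predict() it extends `history` with the finished (inputs, response) turn after streaming ends, so the next call actually receives the previous exchange. Below is a minimal, self-contained sketch of the resulting history handling; build_history_feedin and the sample values are hypothetical stand-ins for illustration (glm_handle.stream_chat and update_ui from bridge_chatglm.py are not reproduced here).

    # Sketch of the patched history handling (hypothetical helper; the
    # real logic is inline in request_llm/bridge_chatglm.py).

    def build_history_feedin(history, sys_prompt):
        # ChatGLM's stream_chat takes no system prompt, so it is smuggled
        # in as a fake first exchange -- appended once, before the loop.
        history_feedin = [["What can I do?", sys_prompt]]
        for i in range(len(history) // 2):
            # history is flat: [user_0, bot_0, user_1, bot_1, ...]
            history_feedin.append([history[2 * i], history[2 * i + 1]])
        return history_feedin

    # Why the added history.extend matters: without it, every call started
    # from an empty history, so the model never saw earlier turns.
    history = []
    inputs, response = "Hi", "Hello!"        # stand-ins for a real model turn
    history.extend([inputs, response])       # the line the patch adds to predict()
    assert build_history_feedin(history, "Be helpful.") == [
        ["What can I do?", "Be helpful."],   # system prompt, sent exactly once
        ["Hi", "Hello!"],                    # previous turn, now remembered
    ]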