From 781ef4487c7d9cecc47c73c110676aabdcaa6789 Mon Sep 17 00:00:00 2001
From: Your Name <you@example.com>
Date: Sun, 23 Apr 2023 22:44:18 +0800
Subject: [PATCH] =?UTF-8?q?=E4=BF=AE=E5=A4=8D=E4=B8=80=E4=BA=9B=E7=BB=86?=
 =?UTF-8?q?=E8=8A=82?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 request_llm/bridge_all.py     |  2 +-
 request_llm/bridge_newbing.py | 12 ++++++++----
 2 files changed, 9 insertions(+), 5 deletions(-)

diff --git a/request_llm/bridge_all.py b/request_llm/bridge_all.py
index 7937d5a..fddc9a7 100644
--- a/request_llm/bridge_all.py
+++ b/request_llm/bridge_all.py
@@ -192,7 +192,7 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history, sys_prompt, obser
 
     def mutex_manager(window_mutex, observe_window):
         while True:
-            time.sleep(0.5)
+            time.sleep(0.25)
             if not window_mutex[-1]: break
             # 看门狗(watchdog)
             for i in range(n_model):
diff --git a/request_llm/bridge_newbing.py b/request_llm/bridge_newbing.py
index 986dc56..3fdc7ba 100644
--- a/request_llm/bridge_newbing.py
+++ b/request_llm/bridge_newbing.py
@@ -406,7 +406,7 @@ class Chatbot:
 
 
 
-load_message = ""
+load_message = "等待NewBing响应。"
 
 """
 ========================================================================
@@ -574,13 +574,16 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",
     watch_dog_patience = 5 # 看门狗 (watchdog) 的耐心, 设置5秒即可
     response = ""
     for response in newbing_handle.stream_chat(query=inputs, history=history_feedin, system_prompt=sys_prompt, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
-        observe_window[0] = response
+        observe_window[0] = preprocess_newbing_out_simple(response)
         if len(observe_window) >= 2:
             if (time.time()-observe_window[1]) > watch_dog_patience:
                 raise RuntimeError("程序终止。")
-    return response
-
+    return preprocess_newbing_out_simple(response)
+def preprocess_newbing_out_simple(result):
+    if '[1]' in result:
+        result += '\n\n```\n' + "\n".join([r for r in result.split('\n') if r.startswith('[')]) + '\n```\n'
+    return result
 
 
 def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt='', stream = True, additional_fn=None):
     """
@@ -609,6 +612,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     for i in range(len(history)//2):
         history_feedin.append([history[2*i], history[2*i+1]] )
 
+    chatbot[-1] = (inputs, preprocess_newbing_out(response))
     yield from update_ui(chatbot=chatbot, history=history, msg="NewBing响应缓慢,尚未完成全部响应,请耐心完成后再提交新问题。")
     for response in newbing_handle.stream_chat(query=inputs, history=history_feedin, system_prompt=system_prompt, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
         chatbot[-1] = (inputs, preprocess_newbing_out(response))