From 3041858e7f7d41195be5a6c43852fb78effe92b5 Mon Sep 17 00:00:00 2001
From: Your Name
Date: Sun, 23 Apr 2023 23:16:25 +0800
Subject: [PATCH] =?UTF-8?q?=E4=BC=98=E5=8C=96=E6=8F=90=E7=A4=BA?=
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 request_llm/bridge_newbing.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/request_llm/bridge_newbing.py b/request_llm/bridge_newbing.py
index 321e42d..43c3e76 100644
--- a/request_llm/bridge_newbing.py
+++ b/request_llm/bridge_newbing.py
@@ -611,7 +611,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
         单线程方法
         函数的说明请见 request_llm/bridge_all.py
     """
-    chatbot.append((inputs, "[Local Message]: 等待Bing响应 ..."))
+    chatbot.append((inputs, "[Local Message]: 等待NewBing响应中 ..."))
 
     global newbing_handle
     if (newbing_handle is None) or (not newbing_handle.success):
@@ -633,7 +633,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     for i in range(len(history)//2):
         history_feedin.append([history[2*i], history[2*i+1]] )
 
-    chatbot[-1] = (inputs, "[Local Message]: 等待Bing响应 ...")
+    chatbot[-1] = (inputs, "[Local Message]: 等待NewBing响应中 ...")
     yield from update_ui(chatbot=chatbot, history=history, msg="NewBing响应缓慢,尚未完成全部响应,请耐心完成后再提交新问题。")
     for response in newbing_handle.stream_chat(query=inputs, history=history_feedin, system_prompt=system_prompt, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
         chatbot[-1] = (inputs, preprocess_newbing_out(response))