From 8e59412c47b49d8929a2c53fd332d0e933e1b688 Mon Sep 17 00:00:00 2001
From: binary-husky <505030475@qq.com>
Date: Mon, 24 Apr 2023 20:14:23 +0800
Subject: [PATCH] Fix unreasonable code in the newbing interaction
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 request_llm/bridge_newbing.py | 13 +++++++------
 1 file changed, 7 insertions(+), 6 deletions(-)

diff --git a/request_llm/bridge_newbing.py b/request_llm/bridge_newbing.py
index 232eb6b..eb8c916 100644
--- a/request_llm/bridge_newbing.py
+++ b/request_llm/bridge_newbing.py
@@ -88,14 +88,14 @@ class NewBingHandle(Process):
                 if a not in self.local_history:
                     self.local_history.append(a)
                     prompt += a + '\n'
-                if b not in self.local_history:
-                    self.local_history.append(b)
-                    prompt += b + '\n'
+                # if b not in self.local_history:
+                #     self.local_history.append(b)
+                #     prompt += b + '\n'
 
             # 问题
             prompt += question
             self.local_history.append(question)
-            
+            print('question:', question)
             # 提交
             async for final, response in self.newbing_model.ask_stream(
                 prompt=question,
@@ -108,7 +108,8 @@ class NewBingHandle(Process):
                 else:
                     print('-------- receive final ---------')
                     self.child.send('[Finish]')
-        
+                    # self.local_history.append(response)
+
 
     def run(self):
         """
@@ -245,6 +246,6 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
         chatbot[-1] = (inputs, preprocess_newbing_out(response))
         yield from update_ui(chatbot=chatbot, history=history, msg="NewBing响应缓慢,尚未完成全部响应,请耐心完成后再提交新问题。")
 
-    history.extend([inputs, preprocess_newbing_out(response)])
+    history.extend([inputs, response])
     yield from update_ui(chatbot=chatbot, history=history, msg="完成全部响应,请提交新问题。")
 
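
For reference, the hunks above leave NewBingHandle.async_run replaying only the user side (a) of each past turn into prompt and self.local_history; the bot side (b) is no longer recorded, the final response is not appended to self.local_history, and predict now stores the raw response in history while the chatbot display still goes through preprocess_newbing_out. The snippet below is a minimal, runnable sketch of that bookkeeping; build_prompt and the sample turns are hypothetical illustrations, not code from bridge_newbing.py.

# Minimal, hypothetical sketch of the history bookkeeping that this patch
# leaves in NewBingHandle.async_run. build_prompt is NOT a function in
# bridge_newbing.py; it only mirrors the changed lines for illustration.

def build_prompt(history, question, local_history):
    prompt = ""
    # Replay only the user side (a) of each past turn; the bot side (b)
    # is skipped now that the b-branch is commented out by this patch.
    for a, b in history:
        if a not in local_history:
            local_history.append(a)
            prompt += a + '\n'

    # The new question is always appended and remembered.
    prompt += question
    local_history.append(question)
    print('question:', question)  # same debug print the patch adds
    return prompt


if __name__ == '__main__':
    local_history = []
    past_turns = [("What is Python?", "Python is a programming language.")]
    print(build_prompt(past_turns, "Show me an example.", local_history))
    print(local_history)  # the bot reply from past_turns never enters local_history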