From 43e64782dc2fc8d4451aeb37f10d17063d6c76a1 Mon Sep 17 00:00:00 2001
From: 505030475 <505030475@qq.com>
Date: Tue, 16 May 2023 00:35:47 +0800
Subject: [PATCH] Fix error display for unofficial OpenAI reverse proxies
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 request_llm/bridge_chatgpt.py | 10 +++++++++-
 1 file changed, 9 insertions(+), 1 deletion(-)

diff --git a/request_llm/bridge_chatgpt.py b/request_llm/bridge_chatgpt.py
index aa6ae72..eef8fbf 100644
--- a/request_llm/bridge_chatgpt.py
+++ b/request_llm/bridge_chatgpt.py
@@ -168,7 +168,15 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     if stream:
         stream_response = response.iter_lines()
         while True:
-            chunk = next(stream_response)
+            try:
+                chunk = next(stream_response)
+            except StopIteration:
+                # Only non-official OpenAI endpoints raise this error; the official OpenAI API and API2D never reach this branch
+                from toolbox import regular_txt_to_markdown; tb_str = '```\n' + trimmed_format_exc() + '```'
+                chatbot[-1] = (chatbot[-1][0], f"[Local Message] Remote endpoint returned an error: \n\n{tb_str} \n\n{regular_txt_to_markdown(chunk.decode())}")
+                yield from update_ui(chatbot=chatbot, history=history, msg="Remote endpoint returned an error: " + chunk.decode()) # refresh the UI
+                return
+
             # print(chunk.decode()[6:])
             if is_head_of_the_stream and (r'"object":"error"' not in chunk.decode()):
                 # the first frame of the data stream carries no content
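For context, here is a minimal, self-contained sketch of the pattern this patch introduces: wrapping `next()` on `response.iter_lines()` in a `try/except StopIteration` so that a reverse proxy which drops the stream early yields a readable error message instead of an unhandled exception. The endpoint URL, payload, and function name are placeholders, not part of the patch; only `requests` is assumed, and the gpt_academic helpers used above (`update_ui`, `regular_txt_to_markdown`, `trimmed_format_exc`) are deliberately left out.

```python
import requests

def stream_chat_lines(api_url, payload, headers):
    """Yield decoded SSE lines from a streaming chat endpoint, tolerating
    reverse proxies that close the connection without a final frame."""
    response = requests.post(api_url, headers=headers, json=payload, stream=True)
    stream_response = response.iter_lines()
    chunk = b''  # keep the last raw chunk so it can be shown in the error message
    while True:
        try:
            chunk = next(stream_response)
        except StopIteration:
            # A non-official reverse proxy may simply end the stream instead of
            # sending an SSE error frame; report the last chunk rather than
            # letting StopIteration escape (inside a generator, Python 3.7+
            # would otherwise turn it into a RuntimeError, per PEP 479).
            yield f"[Local Message] remote endpoint closed the stream: {chunk.decode(errors='replace')}"
            return
        if not chunk:
            continue  # skip keep-alive blank lines between SSE events
        line = chunk.decode(errors='replace')
        if line.startswith('data: '):
            line = line[len('data: '):]
        if line.strip() == '[DONE]':
            return
        yield line
```

Initialising `chunk` to `b''` before the loop keeps the error message well-defined even when the stream ends before the first line arrives, and decoding with `errors='replace'` avoids a secondary exception if the proxy returns non-UTF-8 bytes.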