From 8bdcc4ff28bbe5d4428057d6d886b0cd4235e958 Mon Sep 17 00:00:00 2001
From: binary-husky
Date: Sat, 28 Oct 2023 14:32:03 +0800
Subject: [PATCH] Fix compatibility with some third-party APIs
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 request_llm/bridge_chatgpt.py | 5 ++++-
 1 file changed, 4 insertions(+), 1 deletion(-)

diff --git a/request_llm/bridge_chatgpt.py b/request_llm/bridge_chatgpt.py
index 660300e..b498d4d 100644
--- a/request_llm/bridge_chatgpt.py
+++ b/request_llm/bridge_chatgpt.py
@@ -49,7 +49,7 @@ def decode_chunk(chunk):
     has_role = False
     try:
         chunkjson = json.loads(chunk_decoded[6:])
-        has_choices = 'choices' in chunkjson
+        has_choices = ('choices' in chunkjson) and (len(chunkjson['choices']) > 0)
         if has_choices: has_content = "content" in chunkjson['choices'][0]["delta"]
         if has_choices: has_role = "role" in chunkjson['choices'][0]["delta"]
     except:
@@ -216,6 +216,9 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
 
         if chunk:
             try:
+                if not has_choices:
+                    # Some broken third-party APIs produce this kind of error
+                    continue
                 # The former is API2D's termination condition; the latter is OpenAI's
                 if ('data: [DONE]' in chunk_decoded) or (len(chunkjson['choices'][0]["delta"]) == 0):
                     # Judged to be the end of the data stream; gpt_replying_buffer is fully written as well
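
Note (illustrative, not part of the patch): the fix targets OpenAI-compatible third-party relays that stream SSE chunks whose 'choices' list is empty or missing; indexing chunkjson['choices'][0] on such a chunk raises an exception, so the decoder now also checks the list length and the streaming loop simply skips those chunks. The standalone sketch below shows the same defensive parsing; the function name parse_sse_chunk and the byte-string examples are hypothetical, assuming the usual "data: {...}" chunk convention of OpenAI-style streaming responses.

import json

def parse_sse_chunk(chunk: bytes):
    # Hypothetical helper (not the repository's code): decode one "data: ..." SSE line
    # of an OpenAI-style streaming response, tolerating endpoints whose 'choices' list
    # is empty or absent instead of assuming choices[0] always exists.
    chunk_decoded = chunk.decode(errors="ignore")
    chunkjson, has_choices, has_content, has_role = None, False, False, False
    try:
        chunkjson = json.loads(chunk_decoded[len("data: "):])
        # Check the length too, because some relays send `"choices": []`.
        has_choices = ("choices" in chunkjson) and (len(chunkjson["choices"]) > 0)
        if has_choices:
            delta = chunkjson["choices"][0].get("delta", {})
            has_content = "content" in delta
            has_role = "role" in delta
    except json.JSONDecodeError:
        pass  # e.g. "data: [DONE]" or keep-alive lines are not JSON
    return chunk_decoded, chunkjson, has_choices, has_content, has_role

# A caller mirroring the patched predict() loop skips chunks without choices:
for raw in (b'data: {"choices": []}', b'data: {"choices": [{"delta": {"content": "hi"}}]}'):
    decoded, payload, has_choices, has_content, has_role = parse_sse_chunk(raw)
    if not has_choices:
        continue  # misbehaving third-party endpoint; ignore the chunk
    print(payload["choices"][0]["delta"])

The design choice mirrors the patch: rather than special-casing each misbehaving provider, any chunk that fails the "non-empty choices" check is treated as noise and dropped before the API2D/OpenAI termination checks run.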