From bfb7aab4a0a824411eef647fab9dfdd3133518ca Mon Sep 17 00:00:00 2001 From: Hongyi Zhao Date: Wed, 2 Aug 2023 18:03:16 +0800 Subject: [PATCH] Fix the reverse-proxy-based OpenAI access via https://github.com/acheong08/ChatGPT-to-API/. See https://github.com/binary-husky/gpt_academic/issues/900#issuecomment-1658463065 for a more detailed discussion. --- request_llm/bridge_chatgpt.py | 7 ++++--- 1 file changed, 4 insertions(+), 3 deletions(-) diff --git a/request_llm/bridge_chatgpt.py b/request_llm/bridge_chatgpt.py index ea48fba..96af833 100644 --- a/request_llm/bridge_chatgpt.py +++ b/request_llm/bridge_chatgpt.py @@ -186,15 +186,16 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp try: chunk_decoded = chunk.decode() # 前者是API2D的结束条件,后者是OPENAI的结束条件 - if ('data: [DONE]' in chunk_decoded) or (len(json.loads(chunk_decoded[6:])['choices'][0]["delta"]) == 0): + if 'data: [DONE]' in chunk_decoded: # 判定为数据流的结束,gpt_replying_buffer也写完了 logging.info(f'[response] {gpt_replying_buffer}') break # 处理数据流的主体 chunkjson = json.loads(chunk_decoded[6:]) status_text = f"finish_reason: {chunkjson['choices'][0]['finish_reason']}" - # 如果这里抛出异常,一般是文本过长,详情见get_full_error的输出 - gpt_replying_buffer = gpt_replying_buffer + json.loads(chunk_decoded[6:])['choices'][0]["delta"]["content"] + delta = chunkjson['choices'][0]["delta"] + if "content" in delta: + gpt_replying_buffer = gpt_replying_buffer + delta["content"] history[-1] = gpt_replying_buffer chatbot[-1] = (history[-2], history[-1]) yield from update_ui(chatbot=chatbot, history=history, msg=status_text) # 刷新界面