diff --git a/crazy_functions/latex_fns/latex_toolbox.py b/crazy_functions/latex_fns/latex_toolbox.py
index a0c889a..5adc7ea 100644
--- a/crazy_functions/latex_fns/latex_toolbox.py
+++ b/crazy_functions/latex_fns/latex_toolbox.py
@@ -281,9 +281,12 @@ def rm_comments(main_file):
 def find_tex_file_ignore_case(fp):
     dir_name = os.path.dirname(fp)
     base_name = os.path.basename(fp)
+    # 如果输入的文件路径是正确的
+    if os.path.exists(pj(dir_name, base_name)): return pj(dir_name, base_name)
+    # 如果不正确,试着加上.tex后缀试试
     if not base_name.endswith('.tex'): base_name+='.tex'
     if os.path.exists(pj(dir_name, base_name)): return pj(dir_name, base_name)
-    # go case in-sensitive
+    # 如果还找不到,解除大小写限制,再试一次
     import glob
     for f in glob.glob(dir_name+'/*.tex'):
         base_name_s = os.path.basename(fp)
diff --git a/request_llm/bridge_chatgpt.py b/request_llm/bridge_chatgpt.py
index 87cc664..5a7a274 100644
--- a/request_llm/bridge_chatgpt.py
+++ b/request_llm/bridge_chatgpt.py
@@ -177,14 +177,13 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
                 yield from update_ui(chatbot=chatbot, history=history, msg="非Openai官方接口返回了错误:" + chunk.decode()) # 刷新界面
                 return
 
-            # print(chunk.decode()[6:])
-            if is_head_of_the_stream and (r'"object":"error"' not in chunk.decode()):
+            chunk_decoded = chunk.decode()
+            if is_head_of_the_stream and (r'"object":"error"' not in chunk_decoded) and (r"choices" not in chunk_decoded):
                 # 数据流的第一帧不携带content
                 is_head_of_the_stream = False; continue
 
             if chunk:
                 try:
-                    chunk_decoded = chunk.decode()
                     # 前者是API2D的结束条件,后者是OPENAI的结束条件
                     if ('data: [DONE]' in chunk_decoded) or (len(json.loads(chunk_decoded[6:])['choices'][0]["delta"]) == 0):
                         # 判定为数据流的结束,gpt_replying_buffer也写完了