binary-husky 2023-12-15 13:32:39 +08:00
parent 36e19d5202
commit d169fb4b16
3 changed files with 4 additions and 4 deletions


@@ -404,7 +404,7 @@ def 编译Latex(chatbot, history, main_file_original, main_file_modified, work_f
result_pdf = pj(work_folder_modified, f'merge_diff.pdf') # get pdf path
promote_file_to_downloadzone(result_pdf, rename_file=None, chatbot=chatbot) # promote file to web UI
if modified_pdf_success:
- yield from update_ui_lastest_msg(f'转化PDF编译已经成功, 正在尝试生成对比PDF, 请稍 ...', chatbot, history) # 刷新Gradio前端界面
+ yield from update_ui_lastest_msg(f'转化PDF编译已经成功, 正在尝试生成对比PDF, 请稍 ...', chatbot, history) # 刷新Gradio前端界面
result_pdf = pj(work_folder_modified, f'{main_file_modified}.pdf') # get pdf path
origin_pdf = pj(work_folder_original, f'{main_file_original}.pdf') # get pdf path
if os.path.exists(pj(work_folder, '..', 'translation')):
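
For non-Chinese readers: the message yielded in this hunk says roughly "the PDF compiled successfully, now trying to generate the comparison PDF, please wait", and the trailing comment 刷新Gradio前端界面 means "refresh the Gradio frontend". Below is a minimal sketch of how the two helpers in this hunk are typically chained; the wrapper name report_comparison_pdf is hypothetical, pj is assumed to be os.path.join, and the toolbox import location is an assumption about the surrounding project layout.

```python
# Minimal sketch, not the repository's exact code: the wrapper name and import
# path are assumptions; the call shapes of the two helpers match the hunk above.
import os

from toolbox import promote_file_to_downloadzone, update_ui_lastest_msg  # assumed module

pj = os.path.join  # the diff uses pj(...) as shorthand for os.path.join


def report_comparison_pdf(work_folder_modified, chatbot, history):
    """Hypothetical wrapper: publish merge_diff.pdf to the web UI, then refresh the chat view."""
    result_pdf = pj(work_folder_modified, 'merge_diff.pdf')
    if os.path.exists(result_pdf):
        promote_file_to_downloadzone(result_pdf, rename_file=None, chatbot=chatbot)
    yield from update_ui_lastest_msg('对比PDF已生成 (comparison PDF ready)', chatbot, history)
```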


@@ -2863,7 +2863,7 @@
"加载API_KEY": "Loading API_KEY",
"协助您编写代码": "Assist you in writing code",
"我可以为您提供以下服务": "I can provide you with the following services",
"排队中请稍 ...": "Please wait in line ...",
"排队中请稍 ...": "Please wait in line ...",
"建议您使用英文提示词": "It is recommended to use English prompts",
"不能支撑AutoGen运行": "Cannot support AutoGen operation",
"帮助您解决编程问题": "Help you solve programming problems",


@@ -183,11 +183,11 @@ class LocalLLMHandle(Process):
def stream_chat(self, **kwargs):
# ⭐run in main process
if self.get_state() == "`准备就绪`":
yield "`正在等待线程锁,排队中请稍 ...`"
yield "`正在等待线程锁,排队中请稍 ...`"
with self.threadLock:
if self.parent.poll():
yield "`排队中请稍 ...`"
yield "`排队中请稍 ...`"
self.clear_pending_messages()
self.parent.send(kwargs)
std_out = ""
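
The third hunk sits in LocalLLMHandle.stream_chat on the main-process side: it waits for a threading lock, optionally drains stale replies from the pipe to the model subprocess (准备就绪 means "ready"; the yielded backtick messages tell the user they are queued), then sends the new request and starts collecting streamed output into std_out. A minimal sketch of that lock-drain-send pattern follows; the class name, the terminator convention, and the receive loop are assumptions, not the repository's exact code.

```python
# Minimal sketch of the lock/drain/send pattern in the hunk above; the class
# name, terminator value, and receive loop are assumptions.
import threading


class PipeStreamer:
    def __init__(self, parent_conn):
        self.parent = parent_conn           # parent end of a multiprocessing.Pipe() to the model subprocess
        self.threadLock = threading.Lock()  # serializes concurrent UI requests

    def clear_pending_messages(self):
        while self.parent.poll():           # drain replies left over from a previous request
            self.parent.recv()

    def stream_chat(self, **kwargs):
        yield "waiting for the thread lock, queued ..."
        with self.threadLock:               # only one request talks to the subprocess at a time
            if self.parent.poll():
                yield "queued ..."
                self.clear_pending_messages()
            self.parent.send(kwargs)        # forward the request to the model subprocess
            std_out = ""
            while True:
                chunk = self.parent.recv()  # subprocess streams text chunks, then a terminator
                if chunk is None:           # terminator convention is an assumption
                    break
                std_out += chunk
                yield std_out
```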