diff --git a/crazy_functions/Latex全文润色.py b/crazy_functions/Latex全文润色.py index 2caeb2e..663a062 100644 --- a/crazy_functions/Latex全文润色.py +++ b/crazy_functions/Latex全文润色.py @@ -3,7 +3,7 @@ from toolbox import CatchException, report_execption, write_results_to_file, pre fast_debug = False -def 解析Paper(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt): +def 解析Paper(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt): import time, glob, os print('begin analysis on:', file_manifest) for index, fp in enumerate(file_manifest): @@ -14,39 +14,39 @@ def 解析Paper(file_manifest, project_folder, top_p, temperature, chatbot, hist i_say = prefix + f'请对下面的文章片段用中文做一个概述,文件名是{os.path.relpath(fp, project_folder)},文章内容是 ```{file_content}```' i_say_show_user = prefix + f'[{index}/{len(file_manifest)}] 请对下面的文章片段做一个概述: {os.path.abspath(fp)}' chatbot.append((i_say_show_user, "[Local Message] waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 if not fast_debug: msg = '正常' # ** gpt request ** - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature, history=[]) # 带超时倒计时 + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, llm_kwargs, plugin_kwargs, history=[]) # 带超时倒计时 chatbot[-1] = (i_say_show_user, gpt_say) history.append(i_say_show_user); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 if not fast_debug: time.sleep(2) all_file = ', '.join([os.path.relpath(fp, project_folder) for index, fp in enumerate(file_manifest)]) i_say = f'根据以上你自己的分析,对全文进行概括,用学术性语言写一段中文摘要,然后再写一段英文摘要(包括{all_file})。' chatbot.append((i_say, "[Local Message] waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 if not fast_debug: msg = '正常' # ** gpt request ** - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature, history=history) # 带超时倒计时 + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, llm_kwargs, plugin_kwargs, history=history) # 带超时倒计时 chatbot[-1] = (i_say, gpt_say) history.append(i_say); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 res = write_results_to_file(history) chatbot.append(("完成了吗?", res)) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 @CatchException -def 读文章写摘要(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 读文章写摘要(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -54,13 +54,13 @@ def 读文章写摘要(txt, top_p, temperature, chatbot, history, systemPromptTx else: if txt == "": txt = '空空如也的输入栏' report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.tex', recursive=True)] # + \ # [f for f in glob.glob(f'{project_folder}/**/*.cpp', recursive=True)] + \ # [f for f in 
glob.glob(f'{project_folder}/**/*.c', recursive=True)] if len(file_manifest) == 0: report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.tex文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return - yield from 解析Paper(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析Paper(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) diff --git a/crazy_functions/crazy_utils.py b/crazy_functions/crazy_utils.py index 1a8308f..0c46349 100644 --- a/crazy_functions/crazy_utils.py +++ b/crazy_functions/crazy_utils.py @@ -37,7 +37,7 @@ def input_clipping(inputs, history, max_token_limit): return inputs, history def request_gpt_model_in_new_thread_with_ui_alive( - inputs, inputs_show_user, top_p, temperature, + inputs, inputs_show_user, llm_kwargs, chatbot, history, sys_prompt, refresh_interval=0.2, handle_token_exceed=True, retry_times_at_unknown_error=2, @@ -66,7 +66,7 @@ def request_gpt_model_in_new_thread_with_ui_alive( # 用户反馈 chatbot.append([inputs_show_user, ""]) msg = '正常' - yield from update_ui(chatbot=chatbot, history=[]) + yield from update_ui(chatbot=chatbot, history=[]) # 刷新界面 executor = ThreadPoolExecutor(max_workers=16) mutable = ["", time.time()] def _req_gpt(inputs, history, sys_prompt): @@ -76,7 +76,7 @@ def request_gpt_model_in_new_thread_with_ui_alive( try: # 【第一种情况】:顺利完成 result = predict_no_ui_long_connection( - inputs=inputs, top_p=top_p, temperature=temperature, + inputs=inputs, llm_kwargs=llm_kwargs, history=history, sys_prompt=sys_prompt, observe_window=mutable) return result except ConnectionAbortedError as token_exceeded_error: @@ -118,12 +118,12 @@ def request_gpt_model_in_new_thread_with_ui_alive( if future.done(): break chatbot[-1] = [chatbot[-1][0], mutable[0]] - yield from update_ui(chatbot=chatbot, history=[]) + yield from update_ui(chatbot=chatbot, history=[]) # 刷新界面 return future.result() def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency( - inputs_array, inputs_show_user_array, top_p, temperature, + inputs_array, inputs_show_user_array, llm_kwargs, chatbot, history_array, sys_prompt_array, refresh_interval=0.2, max_workers=10, scroller_max_len=30, handle_token_exceed=True, show_user_at_complete=False, @@ -141,8 +141,7 @@ def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency( 输入参数 Args (以_array结尾的输入变量都是列表,列表长度为子任务的数量,执行时,会把列表拆解,放到每个子线程中分别执行): inputs_array (list): List of inputs (每个子任务的输入) inputs_show_user_array (list): List of inputs to show user(每个子任务展现在报告中的输入,借助此参数,在汇总报告中隐藏啰嗦的真实输入,增强报告的可读性) - top_p (float): Top p value for sampling from model distribution (GPT参数,浮点数) - temperature (float): Temperature value for sampling from model distribution(GPT参数,浮点数) + llm_kwargs: llm_kwargs参数 chatbot: chatbot (用户界面对话窗口句柄,用于数据流可视化) history_array (list): List of chat history (历史对话输入,双层列表,第一层列表是子任务分解,第二层列表是对话历史) sys_prompt_array (list): List of system prompts (系统输入,列表,用于输入给GPT的前提提示,比如你是翻译官怎样怎样) @@ -167,7 +166,7 @@ def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency( # 用户反馈 chatbot.append(["请开始多线程操作。", ""]) msg = '正常' - yield from update_ui(chatbot=chatbot, history=[]) + yield from update_ui(chatbot=chatbot, history=[]) # 刷新界面 # 异步原子 mutable = [["", time.time(), "等待中"] for _ in range(n_frag)] @@ -181,7 +180,7 @@ def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency( # 【第一种情况】:顺利完成 # time.sleep(10); raise 
RuntimeError("测试") gpt_say = predict_no_ui_long_connection( - inputs=inputs, top_p=top_p, temperature=temperature, history=history, + inputs=inputs, llm_kwargs=llm_kwargs, history=history, sys_prompt=sys_prompt, observe_window=mutable[index], console_slience=True ) mutable[index][2] = "已成功" @@ -253,7 +252,7 @@ def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency( for thread_index, done, obs in zip(range(len(worker_done)), worker_done, observe_win)]) chatbot[-1] = [chatbot[-1][0], f'多线程操作已经开始,完成情况: \n\n{stat_str}' + ''.join(['.']*(cnt % 10+1))] msg = "正常" - yield from update_ui(chatbot=chatbot, history=[]) + yield from update_ui(chatbot=chatbot, history=[]) # 刷新界面 # 异步任务结束 gpt_response_collection = [] for inputs_show_user, f in zip(inputs_show_user_array, futures): @@ -264,7 +263,7 @@ def request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency( for inputs_show_user, f in zip(inputs_show_user_array, futures): gpt_res = f.result() chatbot.append([inputs_show_user, gpt_res]) - yield from update_ui(chatbot=chatbot, history=[]) + yield from update_ui(chatbot=chatbot, history=[]) # 刷新界面 time.sleep(1) return gpt_response_collection diff --git a/crazy_functions/下载arxiv论文翻译摘要.py b/crazy_functions/下载arxiv论文翻译摘要.py index 4409c29..6cef3da 100644 --- a/crazy_functions/下载arxiv论文翻译摘要.py +++ b/crazy_functions/下载arxiv论文翻译摘要.py @@ -133,7 +133,7 @@ def get_name(_url_): @CatchException -def 下载arxiv论文并翻译摘要(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 下载arxiv论文并翻译摘要(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): CRAZY_FUNCTION_INFO = "下载arxiv论文并翻译摘要,函数插件作者[binary-husky]。正在提取摘要并下载PDF文档……" import glob @@ -141,7 +141,7 @@ def 下载arxiv论文并翻译摘要(txt, top_p, temperature, chatbot, history, # 基本信息:功能、贡献者 chatbot.append(["函数插件功能?", CRAZY_FUNCTION_INFO]) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 尝试导入依赖,如果缺少依赖,则给出安装建议 try: @@ -150,7 +150,7 @@ def 下载arxiv论文并翻译摘要(txt, top_p, temperature, chatbot, history, report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade pdfminer beautifulsoup4```。") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 清空历史,以免输入溢出 @@ -163,25 +163,25 @@ def 下载arxiv论文并翻译摘要(txt, top_p, temperature, chatbot, history, report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"下载pdf文件未成功") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 翻译摘要等 i_say = f"请你阅读以下学术论文相关的材料,提取摘要,翻译为中文。材料如下:{str(info)}" i_say_show_user = f'请你阅读以下学术论文相关的材料,提取摘要,翻译为中文。论文:{pdf_path}' chatbot.append((i_say_show_user, "[Local Message] waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 msg = '正常' # ** gpt request ** - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature, history=[]) # 带超时倒计时 + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, llm_kwargs, plugin_kwargs, history=[]) # 带超时倒计时 chatbot[-1] = (i_say_show_user, gpt_say) history.append(i_say_show_user); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 # 写入文件 import shutil # 重置文件的创建时间 
shutil.copyfile(pdf_path, f'./gpt_log/{os.path.basename(pdf_path)}'); os.remove(pdf_path) res = write_results_to_file(history) chatbot.append(("完成了吗?", res + "\n\nPDF文件也已经下载")) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 diff --git a/crazy_functions/代码重写为全英文_多线程.py b/crazy_functions/代码重写为全英文_多线程.py index 4dc7c95..608169f 100644 --- a/crazy_functions/代码重写为全英文_多线程.py +++ b/crazy_functions/代码重写为全英文_多线程.py @@ -23,7 +23,7 @@ def break_txt_into_half_at_some_linebreak(txt): @CatchException -def 全项目切换英文(txt, top_p, temperature, chatbot, history, sys_prompt, WEB_PORT): +def 全项目切换英文(txt, llm_kwargs, plugin_kwargs, chatbot, history, sys_prompt, web_port): # 第1步:清空历史,以免输入溢出 history = [] @@ -34,7 +34,7 @@ def 全项目切换英文(txt, top_p, temperature, chatbot, history, sys_prompt, report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade openai transformers```。") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 第3步:集合文件 @@ -54,7 +54,7 @@ def 全项目切换英文(txt, top_p, temperature, chatbot, history, sys_prompt, i_say_show_user =f'[{index}/{len(file_manifest)}] 接下来请将以下代码中包含的所有中文转化为英文,只输出转化后的英文代码,请用代码块输出代码: {os.path.abspath(fp)}' i_say_show_user_buffer.append(i_say_show_user) chatbot.append((i_say_show_user, "[Local Message] 等待多线程操作,中间过程不予显示.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 第5步:Token限制下的截断与处理 @@ -82,7 +82,7 @@ def 全项目切换英文(txt, top_p, temperature, chatbot, history, sys_prompt, for file_content_partial in file_content_breakdown: i_say = i_say_template(fp, file_content_partial) # # ** gpt request ** - gpt_say_partial = predict_no_ui_long_connection(inputs=i_say, top_p=top_p, temperature=temperature, history=[], sys_prompt=sys_prompt, observe_window=observe_window[index]) + gpt_say_partial = predict_no_ui_long_connection(inputs=i_say, llm_kwargs=llm_kwargs, history=[], sys_prompt=sys_prompt, observe_window=observe_window[index]) gpt_say_partial = extract_code_block_carefully(gpt_say_partial) gpt_say += gpt_say_partial mutable_return[index] = gpt_say @@ -97,7 +97,7 @@ def 全项目切换英文(txt, top_p, temperature, chatbot, history, sys_prompt, h.daemon = True h.start() chatbot.append(('开始了吗?', f'多线程操作已经开始')) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 第8步:循环轮询各个线程是否执行完毕 cnt = 0 @@ -113,7 +113,7 @@ def 全项目切换英文(txt, top_p, temperature, chatbot, history, sys_prompt, stat = [f'执行中: {obs}\n\n' if alive else '已完成\n\n' for alive, obs in zip(th_alive, observe_win)] stat_str = ''.join(stat) chatbot[-1] = (chatbot[-1][0], f'多线程操作已经开始,完成情况: \n\n{stat_str}' + ''.join(['.']*(cnt%10+1))) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 第9步:把结果写入文件 for index, h in enumerate(handles): @@ -130,10 +130,10 @@ def 全项目切换英文(txt, top_p, temperature, chatbot, history, sys_prompt, shutil.copyfile(file_manifest[index], where_to_relocate) chatbot.append((i_say_show_user, f'[Local Message] 已完成{os.path.abspath(fp)}的转化,\n\n存入{os.path.abspath(where_to_relocate)}')) history.append(i_say_show_user); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 time.sleep(1) # 第10步:备份一个文件 res = 
write_results_to_file(history) chatbot.append(("生成一份任务执行报告", res)) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 diff --git a/crazy_functions/总结word文档.py b/crazy_functions/总结word文档.py index 2cfbc7f..82eec1c 100644 --- a/crazy_functions/总结word文档.py +++ b/crazy_functions/总结word文档.py @@ -4,7 +4,7 @@ from toolbox import CatchException, report_execption, write_results_to_file, pre fast_debug = False -def 解析docx(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt): +def 解析docx(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt): import time, os # pip install python-docx 用于docx格式,跨平台 # pip install pywin32 用于doc格式,仅支持Win平台 @@ -36,58 +36,58 @@ def 解析docx(file_manifest, project_folder, top_p, temperature, chatbot, histo f'文章内容是 ```{file_content}```' i_say_show_user = prefix + f'[{index+1}/{len(file_manifest)}] 假设你是论文审稿专家,请对下面的文章片段做概述: {os.path.abspath(fp)}' chatbot.append((i_say_show_user, "[Local Message] waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 if not fast_debug: msg = '正常' # ** gpt request ** - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature, + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, llm_kwargs, plugin_kwargs, history=[]) # 带超时倒计时 chatbot[-1] = (i_say_show_user, gpt_say) history.append(i_say_show_user); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 if not fast_debug: time.sleep(2) """ # 可按需启用 i_say = f'根据你上述的分析,对全文进行概括,用学术性语言写一段中文摘要,然后再写一篇英文的。' chatbot.append((i_say, "[Local Message] waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 i_say = f'我想让你做一个论文写作导师。您的任务是使用人工智能工具(例如自然语言处理)提供有关如何改进其上述文章的反馈。' \ f'您还应该利用您在有效写作技巧方面的修辞知识和经验来建议作者可以更好地以书面形式表达他们的想法和想法的方法。' \ f'根据你之前的分析,提出建议' chatbot.append((i_say, "[Local Message] waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 """ if not fast_debug: msg = '正常' # ** gpt request ** - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature, + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, llm_kwargs, plugin_kwargs, history=history) # 带超时倒计时 chatbot[-1] = (i_say, gpt_say) history.append(i_say) history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 res = write_results_to_file(history) chatbot.append(("完成了吗?", res)) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 @CatchException -def 总结word文档(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 总结word文档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): import glob, os # 基本信息:功能、贡献者 chatbot.append([ "函数插件功能?", "批量总结Word文档。函数插件贡献者: JasonGuo1"]) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 尝试导入依赖,如果缺少依赖,则给出安装建议 try: @@ -96,7 +96,7 @@ def 总结word文档(txt, top_p, temperature, chatbot, 
history, systemPromptTxt, report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade python-docx pywin32```。") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 清空历史,以免输入溢出 @@ -108,7 +108,7 @@ def 总结word文档(txt, top_p, temperature, chatbot, history, systemPromptTxt, else: if txt == "": txt = '空空如也的输入栏' report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 搜索需要处理的文件清单 @@ -121,8 +121,8 @@ def 总结word文档(txt, top_p, temperature, chatbot, history, systemPromptTxt, # 如果没找到任何文件 if len(file_manifest) == 0: report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何.docx或doc文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 开始正式执行任务 - yield from 解析docx(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析docx(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) diff --git a/crazy_functions/批量总结PDF文档.py b/crazy_functions/批量总结PDF文档.py index aff64c9..4fa06d9 100644 --- a/crazy_functions/批量总结PDF文档.py +++ b/crazy_functions/批量总结PDF文档.py @@ -58,7 +58,7 @@ def clean_text(raw_text): return final_text.strip() -def 解析PDF(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt): +def 解析PDF(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt): import time, glob, os, fitz print('begin analysis on:', file_manifest) for index, fp in enumerate(file_manifest): @@ -73,45 +73,45 @@ def 解析PDF(file_manifest, project_folder, top_p, temperature, chatbot, histor i_say = prefix + f'请对下面的文章片段用中文做一个概述,文件名是{os.path.relpath(fp, project_folder)},文章内容是 ```{file_content}```' i_say_show_user = prefix + f'[{index}/{len(file_manifest)}] 请对下面的文章片段做一个概述: {os.path.abspath(fp)}' chatbot.append((i_say_show_user, "[Local Message] waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 if not fast_debug: msg = '正常' # ** gpt request ** - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature, history=[]) # 带超时倒计时 + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, llm_kwargs, plugin_kwargs, history=[]) # 带超时倒计时 chatbot[-1] = (i_say_show_user, gpt_say) history.append(i_say_show_user); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 if not fast_debug: time.sleep(2) all_file = ', '.join([os.path.relpath(fp, project_folder) for index, fp in enumerate(file_manifest)]) i_say = f'根据以上你自己的分析,对全文进行概括,用学术性语言写一段中文摘要,然后再写一段英文摘要(包括{all_file})。' chatbot.append((i_say, "[Local Message] waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 if not fast_debug: msg = '正常' # ** gpt request ** - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature, history=history) # 带超时倒计时 + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, llm_kwargs, plugin_kwargs, history=history) # 带超时倒计时 chatbot[-1] = (i_say, 
gpt_say) history.append(i_say); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 res = write_results_to_file(history) chatbot.append(("完成了吗?", res)) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 @CatchException -def 批量总结PDF文档(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 批量总结PDF文档(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): import glob, os # 基本信息:功能、贡献者 chatbot.append([ "函数插件功能?", "批量总结PDF文档。函数插件贡献者: ValeriaWong,Eralien"]) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 尝试导入依赖,如果缺少依赖,则给出安装建议 try: @@ -120,7 +120,7 @@ def 批量总结PDF文档(txt, top_p, temperature, chatbot, history, systemPromp report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade pymupdf```。") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 清空历史,以免输入溢出 @@ -132,7 +132,7 @@ def 批量总结PDF文档(txt, top_p, temperature, chatbot, history, systemPromp else: if txt == "": txt = '空空如也的输入栏' report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 搜索需要处理的文件清单 @@ -144,8 +144,8 @@ def 批量总结PDF文档(txt, top_p, temperature, chatbot, history, systemPromp # 如果没找到任何文件 if len(file_manifest) == 0: report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.tex或.pdf文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 开始正式执行任务 - yield from 解析PDF(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析PDF(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) diff --git a/crazy_functions/批量总结PDF文档pdfminer.py b/crazy_functions/批量总结PDF文档pdfminer.py index d9841d4..396a1f0 100644 --- a/crazy_functions/批量总结PDF文档pdfminer.py +++ b/crazy_functions/批量总结PDF文档pdfminer.py @@ -62,7 +62,7 @@ def readPdf(pdfPath): return outTextList -def 解析Paper(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt): +def 解析Paper(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt): import time, glob, os from bs4 import BeautifulSoup print('begin analysis on:', file_manifest) @@ -78,39 +78,39 @@ def 解析Paper(file_manifest, project_folder, top_p, temperature, chatbot, hist i_say = prefix + f'请对下面的文章片段用中文做一个概述,文件名是{os.path.relpath(fp, project_folder)},文章内容是 ```{file_content}```' i_say_show_user = prefix + f'[{index}/{len(file_manifest)}] 请对下面的文章片段做一个概述: {os.path.abspath(fp)}' chatbot.append((i_say_show_user, "[Local Message] waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 if not fast_debug: msg = '正常' # ** gpt request ** - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature, history=[]) # 带超时倒计时 + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, llm_kwargs, plugin_kwargs, history=[]) # 带超时倒计时 chatbot[-1] = (i_say_show_user, gpt_say) 
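# --- editor's illustrative sketch (not part of the patch) --------------------
# The low-level, non-UI request helper keeps its behaviour but now receives the
# llm_kwargs dict, as the crazy_utils.py and 代码重写为全英文_多线程.py hunks above
# show. A hypothetical direct call against the updated signature; observe_window
# is the optional [观测窗, 看门狗] pair used to stream partial output across threads:
import time
from request_llm.bridge_chatgpt import predict_no_ui_long_connection  # defined in this patch

example_llm_kwargs = {'top_p': 1.0, 'temperature': 1.0}   # assumed example values
observe_window = ["", time.time()]                         # [partial text, watchdog timestamp]
reply = predict_no_ui_long_connection(
    inputs="请用一句话概述这段文字",                        # assumed example prompt
    llm_kwargs=example_llm_kwargs,
    history=[],
    sys_prompt="你是一名学术助手。",
    observe_window=observe_window)
# --- end of sketch ------------------------------------------------------------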
history.append(i_say_show_user); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 if not fast_debug: time.sleep(2) all_file = ', '.join([os.path.relpath(fp, project_folder) for index, fp in enumerate(file_manifest)]) i_say = f'根据以上你自己的分析,对全文进行概括,用学术性语言写一段中文摘要,然后再写一段英文摘要(包括{all_file})。' chatbot.append((i_say, "[Local Message] waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 if not fast_debug: msg = '正常' # ** gpt request ** - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature, history=history) # 带超时倒计时 + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, llm_kwargs, plugin_kwargs, history=history) # 带超时倒计时 chatbot[-1] = (i_say, gpt_say) history.append(i_say); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 res = write_results_to_file(history) chatbot.append(("完成了吗?", res)) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 @CatchException -def 批量总结PDF文档pdfminer(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 批量总结PDF文档pdfminer(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): history = [] # 清空历史,以免输入溢出 import glob, os @@ -118,7 +118,7 @@ def 批量总结PDF文档pdfminer(txt, top_p, temperature, chatbot, history, sys chatbot.append([ "函数插件功能?", "批量总结PDF文档,此版本使用pdfminer插件,带token约简功能。函数插件贡献者: Euclid-Jie。"]) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 尝试导入依赖,如果缺少依赖,则给出安装建议 try: @@ -127,14 +127,14 @@ def 批量总结PDF文档pdfminer(txt, top_p, temperature, chatbot, history, sys report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade pdfminer beautifulsoup4```。") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return if os.path.exists(txt): project_folder = txt else: if txt == "": txt = '空空如也的输入栏' report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.tex', recursive=True)] + \ [f for f in glob.glob(f'{project_folder}/**/*.pdf', recursive=True)] # + \ @@ -142,7 +142,7 @@ def 批量总结PDF文档pdfminer(txt, top_p, temperature, chatbot, history, sys # [f for f in glob.glob(f'{project_folder}/**/*.c', recursive=True)] if len(file_manifest) == 0: report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.tex或pdf文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return - yield from 解析Paper(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析Paper(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) diff --git a/crazy_functions/批量翻译PDF文档_多线程.py b/crazy_functions/批量翻译PDF文档_多线程.py index 3b9eb70..7121b46 100644 --- a/crazy_functions/批量翻译PDF文档_多线程.py +++ b/crazy_functions/批量翻译PDF文档_多线程.py @@ -89,7 +89,7 @@ def 
read_and_clean_pdf_text(fp): @CatchException -def 批量翻译PDF文档(txt, top_p, temperature, chatbot, history, sys_prompt, WEB_PORT): +def 批量翻译PDF文档(txt, llm_kwargs, plugin_kwargs, chatbot, history, sys_prompt, web_port): import glob import os @@ -97,7 +97,7 @@ def 批量翻译PDF文档(txt, top_p, temperature, chatbot, history, sys_prompt, chatbot.append([ "函数插件功能?", "批量总结PDF文档。函数插件贡献者: Binary-Husky(二进制哈士奇)"]) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 尝试导入依赖,如果缺少依赖,则给出安装建议 try: @@ -107,7 +107,7 @@ def 批量翻译PDF文档(txt, top_p, temperature, chatbot, history, sys_prompt, report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade pymupdf tiktoken```。") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 清空历史,以免输入溢出 @@ -121,7 +121,7 @@ def 批量翻译PDF文档(txt, top_p, temperature, chatbot, history, sys_prompt, txt = '空空如也的输入栏' report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 搜索需要处理的文件清单 @@ -132,14 +132,14 @@ def 批量翻译PDF文档(txt, top_p, temperature, chatbot, history, sys_prompt, if len(file_manifest) == 0: report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何.tex或.pdf文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 开始正式执行任务 - yield from 解析PDF(file_manifest, project_folder, top_p, temperature, chatbot, history, sys_prompt) + yield from 解析PDF(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, sys_prompt) -def 解析PDF(file_manifest, project_folder, top_p, temperature, chatbot, history, sys_prompt): +def 解析PDF(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, sys_prompt): import os import tiktoken TOKEN_LIMIT_PER_FRAGMENT = 1600 @@ -164,7 +164,7 @@ def 解析PDF(file_manifest, project_folder, top_p, temperature, chatbot, histor paper_meta_info = yield from request_gpt_model_in_new_thread_with_ui_alive( inputs=f"以下是一篇学术论文的基础信息,请从中提取出“标题”、“收录会议或期刊”、“作者”、“摘要”、“编号”、“作者邮箱”这六个部分。请用markdown格式输出,最后用中文翻译摘要部分。请提取:{paper_meta}", inputs_show_user=f"请从{fp}中提取出“标题”、“收录会议或期刊”等基本信息。", - top_p=top_p, temperature=temperature, + llm_kwargs=llm_kwargs, chatbot=chatbot, history=[], sys_prompt="Your job is to collect information from materials。", ) @@ -173,7 +173,7 @@ def 解析PDF(file_manifest, project_folder, top_p, temperature, chatbot, histor inputs_array=[ f"以下是你需要翻译的文章段落:\n{frag}" for frag in paper_fragments], inputs_show_user_array=[f"" for _ in paper_fragments], - top_p=top_p, temperature=temperature, + llm_kwargs=llm_kwargs, chatbot=chatbot, history_array=[[paper_meta] for _ in paper_fragments], sys_prompt_array=[ @@ -189,7 +189,7 @@ def 解析PDF(file_manifest, project_folder, top_p, temperature, chatbot, histor f'./gpt_log/{create_report_file_name}') chatbot.append((f"{fp}完成了吗?", res)) msg = "完成" - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 # 准备文件的下载 import shutil @@ -202,4 +202,4 @@ def 解析PDF(file_manifest, project_folder, top_p, temperature, chatbot, histor if os.path.exists(pdf_path): os.remove(pdf_path) chatbot.append(("给出输出文件清单", str(generated_conclusion_files))) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from 
update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 diff --git a/crazy_functions/理解PDF文档内容.py b/crazy_functions/理解PDF文档内容.py index a3ac233..4ad3dbf 100644 --- a/crazy_functions/理解PDF文档内容.py +++ b/crazy_functions/理解PDF文档内容.py @@ -58,7 +58,7 @@ def clean_text(raw_text): return final_text.strip() -def 解析PDF(file_name, top_p, temperature, chatbot, history, systemPromptTxt): +def 解析PDF(file_name, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt): import time, glob, os, fitz print('begin analysis on:', file_name) @@ -82,37 +82,37 @@ def 解析PDF(file_name, top_p, temperature, chatbot, history, systemPromptTxt): i_say = f'你只需要回答“接受完成”。文章内容第{i+1}/{split_group}部分是 ```{file_content[i*split_number:(i+1)*split_number]}```' i_say_show_user = f'当前发送{i+1}/{split_group}部分' chatbot.append((i_say_show_user, "[Local Message] waiting gpt response.")) - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature, history=[]) # 带超时倒计时 + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, llm_kwargs, plugin_kwargs, history=[]) # 带超时倒计时 while "完成" not in gpt_say: i_say = f'你只需要回答“接受完成”。文章内容第{i+1}/{split_group}部分是 ```{file_content[i*split_number:(i+1)*split_number]}```' i_say_show_user = f'出现error,重新发送{i+1}/{split_group}部分' - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature, history=[]) # 带超时倒计时 + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, llm_kwargs, plugin_kwargs, history=[]) # 带超时倒计时 time.sleep(1) chatbot[-1] = (i_say_show_user, gpt_say) history.append(i_say_show_user); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 time.sleep(2) i_say = f'接下来,请你扮演一名专业的学术教授,利用你的所有知识并且结合这篇文章,回答我的问题。(请牢记:1.直到我说“退出”,你才能结束任务;2.所有问题需要紧密围绕文章内容;3.如果有公式,请使用tex渲染)' chatbot.append((i_say, "[Local Message] waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # ** gpt request ** - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature, history=history) # 带超时倒计时 + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, llm_kwargs, plugin_kwargs, history=history) # 带超时倒计时 chatbot[-1] = (i_say, gpt_say) history.append(i_say); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 @CatchException -def 理解PDF文档内容(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 理解PDF文档内容(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): import glob, os # 基本信息:功能、贡献者 chatbot.append([ "函数插件功能?", "理解PDF论文内容,并且将结合上下文内容,进行学术解答。函数插件贡献者: Hanzoe。"]) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 import tkinter as tk from tkinter import filedialog @@ -128,26 +128,26 @@ def 理解PDF文档内容(txt, top_p, temperature, chatbot, history, systemPromp report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade pymupdf```。") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 清空历史,以免输入溢出 history = [] # 开始正式执行任务 - yield from 解析PDF(txt, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析PDF(txt, 
llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) @CatchException -def 理解PDF文档内容标准文件输入(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 理解PDF文档内容标准文件输入(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): import glob, os # 基本信息:功能、贡献者 chatbot.append([ "函数插件功能?", "理解PDF论文内容,并且将结合上下文内容,进行学术解答。函数插件贡献者: Hanzoe。"]) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 尝试导入依赖,如果缺少依赖,则给出安装建议 try: @@ -156,7 +156,7 @@ def 理解PDF文档内容标准文件输入(txt, top_p, temperature, chatbot, hi report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade pymupdf```。") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 清空历史,以免输入溢出 @@ -170,7 +170,7 @@ def 理解PDF文档内容标准文件输入(txt, top_p, temperature, chatbot, hi txt = '空空如也的输入栏' report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 搜索需要处理的文件清单 @@ -179,8 +179,8 @@ def 理解PDF文档内容标准文件输入(txt, top_p, temperature, chatbot, hi if len(file_manifest) == 0: report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何.tex或.pdf文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return txt = file_manifest[0] # 开始正式执行任务 - yield from 解析PDF(txt, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析PDF(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) diff --git a/crazy_functions/生成函数注释.py b/crazy_functions/生成函数注释.py index c645b66..5cf25f2 100644 --- a/crazy_functions/生成函数注释.py +++ b/crazy_functions/生成函数注释.py @@ -4,7 +4,7 @@ from toolbox import CatchException, report_execption, write_results_to_file, pre fast_debug = False -def 生成函数注释(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt): +def 生成函数注释(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt): import time, glob, os print('begin analysis on:', file_manifest) for index, fp in enumerate(file_manifest): @@ -14,27 +14,27 @@ def 生成函数注释(file_manifest, project_folder, top_p, temperature, chatbo i_say = f'请对下面的程序文件做一个概述,并对文件中的所有函数生成注释,使用markdown表格输出结果,文件名是{os.path.relpath(fp, project_folder)},文件内容是 ```{file_content}```' i_say_show_user = f'[{index}/{len(file_manifest)}] 请对下面的程序文件做一个概述,并对文件中的所有函数生成注释: {os.path.abspath(fp)}' chatbot.append((i_say_show_user, "[Local Message] waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 if not fast_debug: msg = '正常' # ** gpt request ** - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature, history=[]) # 带超时倒计时 + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, llm_kwargs, plugin_kwargs, history=[]) # 带超时倒计时 chatbot[-1] = (i_say_show_user, gpt_say) history.append(i_say_show_user); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 if not fast_debug: time.sleep(2) if not fast_debug: res = write_results_to_file(history) chatbot.append(("完成了吗?", res)) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from 
update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 @CatchException -def 批量生成函数注释(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 批量生成函数注释(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -42,13 +42,13 @@ def 批量生成函数注释(txt, top_p, temperature, chatbot, history, systemPr else: if txt == "": txt = '空空如也的输入栏' report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.py', recursive=True)] + \ [f for f in glob.glob(f'{project_folder}/**/*.cpp', recursive=True)] if len(file_manifest) == 0: report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.tex文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return - yield from 生成函数注释(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 生成函数注释(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) diff --git a/crazy_functions/解析项目源代码.py b/crazy_functions/解析项目源代码.py index 1b182ec..7c8f6ce 100644 --- a/crazy_functions/解析项目源代码.py +++ b/crazy_functions/解析项目源代码.py @@ -1,7 +1,7 @@ from toolbox import update_ui from toolbox import CatchException, report_execption, write_results_to_file, predict_no_ui_but_counting_down -def 解析源代码新(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt): +def 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt): import os, copy from .crazy_utils import request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency from .crazy_utils import request_gpt_model_in_new_thread_with_ui_alive, WithRetry @@ -30,8 +30,7 @@ def 解析源代码新(file_manifest, project_folder, top_p, temperature, chatbo inputs_show_user_array = inputs_show_user_array, history_array = history_array, sys_prompt_array = sys_prompt_array, - top_p = top_p, - temperature = temperature, + llm_kwargs = llm_kwargs, chatbot = chatbot, show_user_at_complete = True ) @@ -40,7 +39,7 @@ def 解析源代码新(file_manifest, project_folder, top_p, temperature, chatbo history_to_return = report_part_1 res = write_results_to_file(report_part_1) chatbot.append(("完成?", "逐个文件分析已完成。" + res + "\n\n正在开始汇总。")) - yield from update_ui(chatbot=chatbot, history=history_to_return) + yield from update_ui(chatbot=chatbot, history=history_to_return) # 刷新界面 ############################## <第二步,综合,单线程,分组+迭代处理> ################################## batchsize = 16 # 10个文件为一组 @@ -62,7 +61,7 @@ def 解析源代码新(file_manifest, project_folder, top_p, temperature, chatbo this_iteration_history = copy.deepcopy(this_iteration_gpt_response_collection) this_iteration_history.extend(report_part_2) result = yield from request_gpt_model_in_new_thread_with_ui_alive( - inputs=i_say, inputs_show_user=inputs_show_user, top_p=top_p, temperature=temperature, chatbot=chatbot, + inputs=i_say, inputs_show_user=inputs_show_user, llm_kwargs=llm_kwargs, chatbot=chatbot, history=this_iteration_history, # 迭代之前的分析 sys_prompt="你是一个程序架构分析师,正在分析一个源代码项目。") report_part_2.extend([i_say, result]) @@ -74,11 +73,11 @@ def 解析源代码新(file_manifest, project_folder, top_p, temperature, chatbo history_to_return.extend(report_part_2) res = 
write_results_to_file(history_to_return) chatbot.append(("完成了吗?", res)) - yield from update_ui(chatbot=chatbot, history=history_to_return) + yield from update_ui(chatbot=chatbot, history=history_to_return) # 刷新界面 @CatchException -def 解析项目本身(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 解析项目本身(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): history = [] # 清空历史,以免输入溢出 import glob file_manifest = [f for f in glob.glob('./*.py') if ('test_project' not in f) and ('gpt_log' not in f)] + \ @@ -87,12 +86,12 @@ def 解析项目本身(txt, top_p, temperature, chatbot, history, systemPromptTx project_folder = './' if len(file_manifest) == 0: report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何python文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return - yield from 解析源代码新(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) @CatchException -def 解析一个Python项目(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 解析一个Python项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -100,18 +99,18 @@ def 解析一个Python项目(txt, top_p, temperature, chatbot, history, systemPr else: if txt == "": txt = '空空如也的输入栏' report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.py', recursive=True)] if len(file_manifest) == 0: report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何python文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return - yield from 解析源代码新(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) @CatchException -def 解析一个C项目的头文件(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 解析一个C项目的头文件(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -119,19 +118,19 @@ def 解析一个C项目的头文件(txt, top_p, temperature, chatbot, history, s else: if txt == "": txt = '空空如也的输入栏' report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)] + \ [f for f in glob.glob(f'{project_folder}/**/*.hpp', recursive=True)] #+ \ # [f for f in glob.glob(f'{project_folder}/**/*.c', recursive=True)] if len(file_manifest) == 0: report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.h头文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return - yield from 解析源代码新(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) @CatchException 
-def 解析一个C项目(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 解析一个C项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -139,7 +138,7 @@ def 解析一个C项目(txt, top_p, temperature, chatbot, history, systemPromptT else: if txt == "": txt = '空空如也的输入栏' report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)] + \ [f for f in glob.glob(f'{project_folder}/**/*.cpp', recursive=True)] + \ @@ -147,13 +146,13 @@ def 解析一个C项目(txt, top_p, temperature, chatbot, history, systemPromptT [f for f in glob.glob(f'{project_folder}/**/*.c', recursive=True)] if len(file_manifest) == 0: report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.h头文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return - yield from 解析源代码新(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) @CatchException -def 解析一个Java项目(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 解析一个Java项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -161,7 +160,7 @@ def 解析一个Java项目(txt, top_p, temperature, chatbot, history, systemProm else: if txt == "": txt = '空空如也的输入栏' report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.java', recursive=True)] + \ [f for f in glob.glob(f'{project_folder}/**/*.jar', recursive=True)] + \ @@ -169,13 +168,13 @@ def 解析一个Java项目(txt, top_p, temperature, chatbot, history, systemProm [f for f in glob.glob(f'{project_folder}/**/*.sh', recursive=True)] if len(file_manifest) == 0: report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何java文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return - yield from 解析源代码新(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) @CatchException -def 解析一个Rect项目(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 解析一个Rect项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -183,7 +182,7 @@ def 解析一个Rect项目(txt, top_p, temperature, chatbot, history, systemProm else: if txt == "": txt = '空空如也的输入栏' report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.ts', recursive=True)] + \ [f for f in glob.glob(f'{project_folder}/**/*.tsx', recursive=True)] + \ @@ -192,13 +191,13 @@ def 解析一个Rect项目(txt, top_p, temperature, chatbot, history, 
systemProm [f for f in glob.glob(f'{project_folder}/**/*.jsx', recursive=True)] if len(file_manifest) == 0: report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何Rect文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return - yield from 解析源代码新(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) @CatchException -def 解析一个Golang项目(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 解析一个Golang项目(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -206,11 +205,11 @@ def 解析一个Golang项目(txt, top_p, temperature, chatbot, history, systemPr else: if txt == "": txt = '空空如也的输入栏' report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.go', recursive=True)] if len(file_manifest) == 0: report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何golang文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return - yield from 解析源代码新(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析源代码新(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) diff --git a/crazy_functions/读文章写摘要.py b/crazy_functions/读文章写摘要.py index 5f6715c..af57c84 100644 --- a/crazy_functions/读文章写摘要.py +++ b/crazy_functions/读文章写摘要.py @@ -4,7 +4,7 @@ from toolbox import CatchException, report_execption, write_results_to_file, pre fast_debug = False -def 解析Paper(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt): +def 解析Paper(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt): import time, glob, os print('begin analysis on:', file_manifest) for index, fp in enumerate(file_manifest): @@ -15,39 +15,39 @@ def 解析Paper(file_manifest, project_folder, top_p, temperature, chatbot, hist i_say = prefix + f'请对下面的文章片段用中文做一个概述,文件名是{os.path.relpath(fp, project_folder)},文章内容是 ```{file_content}```' i_say_show_user = prefix + f'[{index}/{len(file_manifest)}] 请对下面的文章片段做一个概述: {os.path.abspath(fp)}' chatbot.append((i_say_show_user, "[Local Message] waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 if not fast_debug: msg = '正常' # ** gpt request ** - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature, history=[]) # 带超时倒计时 + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, llm_kwargs, plugin_kwargs, history=[]) # 带超时倒计时 chatbot[-1] = (i_say_show_user, gpt_say) history.append(i_say_show_user); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 if not fast_debug: time.sleep(2) all_file = ', '.join([os.path.relpath(fp, project_folder) for index, fp in enumerate(file_manifest)]) i_say = f'根据以上你自己的分析,对全文进行概括,用学术性语言写一段中文摘要,然后再写一段英文摘要(包括{all_file})。' chatbot.append((i_say, "[Local Message] 
waiting gpt response.")) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 if not fast_debug: msg = '正常' # ** gpt request ** - gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature, history=history) # 带超时倒计时 + gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, llm_kwargs, plugin_kwargs, history=history) # 带超时倒计时 chatbot[-1] = (i_say, gpt_say) history.append(i_say); history.append(gpt_say) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 res = write_results_to_file(history) chatbot.append(("完成了吗?", res)) - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 @CatchException -def 读文章写摘要(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 读文章写摘要(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): history = [] # 清空历史,以免输入溢出 import glob, os if os.path.exists(txt): @@ -55,13 +55,13 @@ def 读文章写摘要(txt, top_p, temperature, chatbot, history, systemPromptTx else: if txt == "": txt = '空空如也的输入栏' report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.tex', recursive=True)] # + \ # [f for f in glob.glob(f'{project_folder}/**/*.cpp', recursive=True)] + \ # [f for f in glob.glob(f'{project_folder}/**/*.c', recursive=True)] if len(file_manifest) == 0: report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.tex文件: {txt}") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return - yield from 解析Paper(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt) + yield from 解析Paper(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt) diff --git a/crazy_functions/谷歌检索小助手.py b/crazy_functions/谷歌检索小助手.py index 9f42da7..0590e3b 100644 --- a/crazy_functions/谷歌检索小助手.py +++ b/crazy_functions/谷歌检索小助手.py @@ -56,16 +56,16 @@ def get_meta_information(url, chatbot, history): }) chatbot[-1] = [chatbot[-1][0], title + f'\n\n是否在arxiv中(不在arxiv中无法获取完整摘要):{is_paper_in_arxiv}\n\n' + abstract] - yield from update_ui(chatbot=chatbot, history=[]) + yield from update_ui(chatbot=chatbot, history=[]) # 刷新界面 return profile @CatchException -def 谷歌检索小助手(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 谷歌检索小助手(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): # 基本信息:功能、贡献者 chatbot.append([ "函数插件功能?", "分析用户提供的谷歌学术(google scholar)搜索页面中,出现的所有文章: binary-husky,插件初始化中..."]) - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 尝试导入依赖,如果缺少依赖,则给出安装建议 try: @@ -75,7 +75,7 @@ def 谷歌检索小助手(txt, top_p, temperature, chatbot, history, systemPromp report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade beautifulsoup4 arxiv```。") - yield from update_ui(chatbot=chatbot, history=history) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 return # 清空历史,以免输入溢出 @@ -91,7 +91,7 @@ def 谷歌检索小助手(txt, top_p, temperature, chatbot, history, systemPromp inputs_show_user = 
f"请分析此页面中出现的所有文章:{txt}" gpt_say = yield from request_gpt_model_in_new_thread_with_ui_alive( inputs=i_say, inputs_show_user=inputs_show_user, - top_p=top_p, temperature=temperature, chatbot=chatbot, history=[], + llm_kwargs=llm_kwargs, chatbot=chatbot, history=[], sys_prompt="你是一个学术翻译,请从数据中提取信息。你必须使用Markdown格式。你必须逐个文献进行处理。" ) @@ -100,7 +100,7 @@ def 谷歌检索小助手(txt, top_p, temperature, chatbot, history, systemPromp chatbot.append(["状态?", "已经全部完成"]) msg = '正常' - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 res = write_results_to_file(history) chatbot.append(("完成了吗?", res)); - yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) + yield from update_ui(chatbot=chatbot, history=chatbot, msg=msg) # 刷新界面 diff --git a/crazy_functions/高级功能函数模板.py b/crazy_functions/高级功能函数模板.py index fefa8bb..50a5cd7 100644 --- a/crazy_functions/高级功能函数模板.py +++ b/crazy_functions/高级功能函数模板.py @@ -2,19 +2,28 @@ from toolbox import CatchException, update_ui from .crazy_utils import request_gpt_model_in_new_thread_with_ui_alive import datetime @CatchException -def 高阶功能模板函数(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT): +def 高阶功能模板函数(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): + """ + txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 + llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 + plugin_kwargs 插件模型的参数,如温度和top_p等,一般原样传递下去就行 + chatbot 聊天显示框的句柄,用于显示给用户 + history 聊天历史,前情提要 + system_prompt 给gpt的静默提醒 + web_port 当前软件运行的端口号 + """ history = [] # 清空历史,以免输入溢出 - chatbot.append(("这是什么功能?", "[Local Message] 请注意,您正在调用一个[函数插件]的模板,该函数面向希望实现更多有趣功能的开发者,它可以作为创建新功能函数的模板(该函数只有20行代码)。此外我们也提供可同步处理大量文件的多线程Demo供您参考。您若希望分享新的功能模组,请不吝PR!")) - yield from update_ui(chatbot=chatbot, history=history) # 由于请求gpt需要一段时间,我们先及时地做一次界面更新 + chatbot.append(("这是什么功能?", "[Local Message] 请注意,您正在调用一个[函数插件]的模板,该函数面向希望实现更多有趣功能的开发者,它可以作为创建新功能函数的模板(该函数只有20多行代码)。此外我们也提供可同步处理大量文件的多线程Demo供您参考。您若希望分享新的功能模组,请不吝PR!")) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 由于请求gpt需要一段时间,我们先及时地做一次界面更新 for i in range(5): currentMonth = (datetime.date.today() + datetime.timedelta(days=i)).month currentDay = (datetime.date.today() + datetime.timedelta(days=i)).day i_say = f'历史中哪些事件发生在{currentMonth}月{currentDay}日?列举两条并发送相关图片。发送图片时,请使用Markdown,将Unsplash API中的PUT_YOUR_QUERY_HERE替换成描述该事件的一个最重要的单词。' gpt_say = yield from request_gpt_model_in_new_thread_with_ui_alive( inputs=i_say, inputs_show_user=i_say, - top_p=top_p, temperature=temperature, chatbot=chatbot, history=[], + llm_kwargs=llm_kwargs, chatbot=chatbot, history=[], sys_prompt="当你想发送一张照片时,请使用Markdown, 并且不要有反斜线, 不要用代码块。使用 Unsplash API (https://source.unsplash.com/1280x720/? 
             sys_prompt="当你想发送一张照片时,请使用Markdown, 并且不要有反斜线, 不要用代码块。使用 Unsplash API (https://source.unsplash.com/1280x720/?< PUT_YOUR_QUERY_HERE >)。"
         )
         chatbot[-1] = (i_say, gpt_say)
         history.append(i_say);history.append(gpt_say)
-        yield from update_ui(chatbot=chatbot, history=history) # 界面更新
\ No newline at end of file
+        yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 # 界面更新
\ No newline at end of file
diff --git a/main.py b/main.py
index 64786d5..1520761 100644
--- a/main.py
+++ b/main.py
@@ -52,6 +52,7 @@ if LAYOUT == "TOP-DOWN":
 cancel_handles = []
 with gr.Blocks(title="ChatGPT 学术优化", theme=set_theme, analytics_enabled=False, css=advanced_css) as demo:
     gr.HTML(title_html)
+    cookies = gr.State({})
     with gr_L1():
         with gr_L2(scale=2):
             chatbot = gr.Chatbot()
@@ -117,8 +118,8 @@ with gr.Blocks(title="ChatGPT 学术优化", theme=set_theme, analytics_enabled=
         return ret
     checkboxes.select(fn_area_visibility, [checkboxes], [area_basic_fn, area_crazy_fn, area_input_primary, area_input_secondary, txt, txt2] )
     # 整理反复出现的控件句柄组合
-    input_combo = [txt, txt2, top_p, temperature, chatbot, history, system_prompt]
-    output_combo = [chatbot, history, status]
+    input_combo = [cookies, txt, txt2, top_p, temperature, chatbot, history, system_prompt]
+    output_combo = [cookies, chatbot, history, status]
     predict_args = dict(fn=ArgsGeneralWrapper(predict), inputs=input_combo, outputs=output_combo)
     # 提交按钮、重置按钮
     cancel_handles.append(txt.submit(**predict_args))
diff --git a/request_llm/bridge_chatgpt.py b/request_llm/bridge_chatgpt.py
index ee8d4d8..f3159cd 100644
--- a/request_llm/bridge_chatgpt.py
+++ b/request_llm/bridge_chatgpt.py
@@ -72,14 +72,14 @@ def predict_no_ui(inputs, top_p, temperature, history=[], sys_prompt=""):
         raise ConnectionAbortedError("Json解析不合常规,可能是文本过长" + response.text)
 
 
-def predict_no_ui_long_connection(inputs, top_p, temperature, history=[], sys_prompt="", observe_window=None, console_slience=False):
+def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="", observe_window=None, console_slience=False):
     """
         发送至chatGPT,等待回复,一次性完成,不显示中间过程。但内部用stream的方法避免中途网线被掐。
         inputs:
             是本次问询的输入
         sys_prompt:
             系统静默prompt
-        top_p, temperature:
+        llm_kwargs:
             chatGPT的内部调优参数
         history:
             是之前的对话列表
@@ -87,7 +87,7 @@ def predict_no_ui_long_connection(inputs, top_p, temperature, history=[], sys_pr
             用于负责跨越线程传递已经输出的部分,大部分时候仅仅为了fancy的视觉效果,留空即可。observe_window[0]:观测窗。observe_window[1]:看门狗
     """
     watch_dog_patience = 5 # 看门狗的耐心, 设置5秒即可
-    headers, payload = generate_payload(inputs, top_p, temperature, history, system_prompt=sys_prompt, stream=True)
+    headers, payload = generate_payload(inputs, llm_kwargs, history, system_prompt=sys_prompt, stream=True)
     retry = 0
     while True:
         try:
@@ -135,8 +135,7 @@ def predict_no_ui_long_connection(inputs, top_p, temperature, history=[], sys_pr
     return result
 
 
-def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt='',
-            stream = True, additional_fn=None):
+def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt='', stream = True, additional_fn=None):
     """
         发送至chatGPT,流式获取输出。
         用于基础的对话功能。
@@ -157,9 +156,9 @@ def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt=''
     raw_input = inputs
     logging.info(f'[raw_input] {raw_input}')
     chatbot.append((inputs, ""))
-    yield from update_ui(chatbot=chatbot, history=history, msg="等待响应")
+    yield from update_ui(chatbot=chatbot, history=history, msg="等待响应") # 刷新界面
 
-    headers, payload = generate_payload(inputs, top_p, temperature, history, system_prompt, stream)
+    headers, payload = generate_payload(inputs, llm_kwargs, history, system_prompt, stream)
     history.append(inputs); history.append(" ")
     retry = 0
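At the bottom of the call chain, bridge_chatgpt.py now reads the sampling parameters back out of llm_kwargs when it assembles the OpenAI request body. The sketch below only mirrors that shape and is not the patched code: build_payload_sketch and DEMO_MODEL are illustrative names standing in for generate_payload and the LLM_MODEL config value.

DEMO_MODEL = "gpt-3.5-turbo"  # illustrative stand-in for the LLM_MODEL config value

def build_payload_sketch(inputs, llm_kwargs, history, system_prompt, stream=True):
    # Same idea as the patched generate_payload: the request body pulls temperature and
    # top_p out of the llm_kwargs dict instead of receiving them as separate floats.
    messages = [{"role": "system", "content": system_prompt}]
    for i in range(0, len(history) - 1, 2):
        messages.append({"role": "user", "content": history[i]})
        messages.append({"role": "assistant", "content": history[i + 1]})
    messages.append({"role": "user", "content": inputs})
    return {
        "model": DEMO_MODEL,
        "messages": messages,
        "temperature": llm_kwargs['temperature'],
        "top_p": llm_kwargs['top_p'],
        "n": 1,
        "stream": stream,
        "presence_penalty": 0,
    }

if __name__ == "__main__":
    llm_kwargs = {'top_p': 1.0, 'temperature': 1.0}  # the same keys ArgsGeneralWrapper packs in
    print(build_payload_sketch("你好", llm_kwargs, history=[], system_prompt="You are a helpful assistant."))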
@@ -172,7 +171,7 @@ def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt=''
             retry += 1
             chatbot[-1] = ((chatbot[-1][0], timeout_bot_msg))
             retry_msg = f",正在重试 ({retry}/{MAX_RETRY}) ……" if MAX_RETRY > 0 else ""
-            yield from update_ui(chatbot=chatbot, history=history, msg="请求超时"+retry_msg)
+            yield from update_ui(chatbot=chatbot, history=history, msg="请求超时"+retry_msg) # 刷新界面
             if retry > MAX_RETRY: raise TimeoutError
 
     gpt_replying_buffer = ""
@@ -200,11 +199,11 @@ def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt=''
                        gpt_replying_buffer = gpt_replying_buffer + json.loads(chunk.decode()[6:])['choices'][0]["delta"]["content"]
                        history[-1] = gpt_replying_buffer
                        chatbot[-1] = (history[-2], history[-1])
-                       yield from update_ui(chatbot=chatbot, history=history, msg=status_text)
+                       yield from update_ui(chatbot=chatbot, history=history, msg=status_text) # 刷新界面
                     except Exception as e:
                        traceback.print_exc()
-                       yield from update_ui(chatbot=chatbot, history=history, msg="Json解析不合常规")
+                       yield from update_ui(chatbot=chatbot, history=history, msg="Json解析不合常规") # 刷新界面
                        chunk = get_full_error(chunk, stream_response)
                        error_msg = chunk.decode()
                        if "reduce the length" in error_msg:
@@ -218,10 +217,10 @@ def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt=''
                            from toolbox import regular_txt_to_markdown
                            tb_str = '```\n' + traceback.format_exc() + '```'
                            chatbot[-1] = (chatbot[-1][0], f"[Local Message] 异常 \n\n{tb_str} \n\n{regular_txt_to_markdown(chunk.decode()[4:])}")
-                           yield from update_ui(chatbot=chatbot, history=history, msg="Json异常" + error_msg)
+                           yield from update_ui(chatbot=chatbot, history=history, msg="Json异常" + error_msg) # 刷新界面
                        return
 
-def generate_payload(inputs, top_p, temperature, history, system_prompt, stream):
+def generate_payload(inputs, llm_kwargs, history, system_prompt, stream):
     """
         整合所有信息,选择LLM模型,生成http请求,为发送请求做准备
     """
@@ -257,8 +256,8 @@ def generate_payload(inputs, top_p, temperature, history, system_prompt, stream)
     payload = {
         "model": LLM_MODEL,
         "messages": messages,
-        "temperature": temperature, # 1.0,
-        "top_p": top_p, # 1.0,
+        "temperature": llm_kwargs['temperature'], # 1.0,
+        "top_p": llm_kwargs['top_p'], # 1.0,
         "n": 1,
         "stream": stream,
         "presence_penalty": 0,
diff --git a/request_llm/bridge_tgui.py b/request_llm/bridge_tgui.py
index d3c0f4e..22a4075 100644
--- a/request_llm/bridge_tgui.py
+++ b/request_llm/bridge_tgui.py
@@ -90,7 +90,7 @@ async def run(context, max_token=512):
 
 
 
-def predict_tgui(inputs, top_p, temperature, chatbot=[], history=[], system_prompt='', stream = True, additional_fn=None):
+def predict_tgui(inputs, top_p, temperature, chatbot, history=[], system_prompt='', stream = True, additional_fn=None):
     """
         发送至chatGPT,流式获取输出。
         用于基础的对话功能。
@@ -111,7 +111,7 @@ def predict_tgui(inputs, top_p, temperature, chatbot=[], history=[], system_prom
     logging.info(f'[raw_input] {raw_input}')
     history.extend([inputs, ""])
     chatbot.append([inputs, ""])
-    yield from update_ui(chatbot=chatbot, history=history, msg="等待响应")
+    yield from update_ui(chatbot=chatbot, history=history, msg="等待响应") # 刷新界面
 
     prompt = inputs
     tgui_say = ""
@@ -138,7 +138,7 @@ def predict_tgui(inputs, top_p, temperature, chatbot=[], history=[], system_prom
             tgui_say = mutable[0]
             history[-1] = tgui_say
             chatbot[-1] = (history[-2], history[-1])
-            yield from update_ui(chatbot=chatbot, history=history)
+            yield from update_ui(chatbot=chatbot, history=history) # 刷新界面
 
     logging.info(f'[response] {tgui_say}')
diff --git a/toolbox.py b/toolbox.py
index 620baf9..a4d9684 100644
--- a/toolbox.py
+++ b/toolbox.py
@@ -9,23 +9,52 @@ import re
 from latex2mathml.converter import convert as tex2mathml
 from functools import wraps, lru_cache
 
+############################### 插件输入输出接驳区 #######################################
+class ChatBotWithCookies(list):
+    def __init__(self, cookie):
+        self._cookie = cookie
+
+    def write_list(self, list):
+        for t in list:
+            self.append(t)
+
+    def get_list(self):
+        return [t for t in self]
+
+    def get_cookies(self):
+        return self._cookie
 
 def ArgsGeneralWrapper(f):
     """
         装饰器函数,用于重组输入参数,改变输入参数的顺序与结构。
     """
-    def decorated(txt, txt2, *args, **kwargs):
+    def decorated(cookies, txt, txt2, top_p, temperature, chatbot, history, system_prompt, *args):
         txt_passon = txt
-        if txt == "" and txt2 != "":
-            txt_passon = txt2
-        yield from f(txt_passon, *args, **kwargs)
+        if txt == "" and txt2 != "": txt_passon = txt2
+        # 引入一个有cookie的chatbot
+        cookies.update({
+            'top_p':top_p,
+            'temperature':temperature,
+        })
+        llm_kwargs = {
+            'top_p':top_p,
+            'temperature':temperature,
+        }
+        plugin_kwargs = {
+        }
+        chatbot_with_cookie = ChatBotWithCookies(cookies)
+        chatbot_with_cookie.write_list(txt_passon)
+        yield from f(txt_passon, llm_kwargs, plugin_kwargs, chatbot_with_cookie, history, system_prompt, *args)
     return decorated
 
-def update_ui(chatbot, history, msg='正常', *args, **kwargs):
+def update_ui(chatbot, history, msg='正常', **kwargs): # 刷新界面
     """
         刷新用户界面
     """
-    yield chatbot, history, msg
+    assert isinstance(chatbot, ChatBotWithCookies), "在传递chatbot的过程中不要将其丢弃。必要时,可用clear将其清空,然后用for+append循环重新赋值。"
+    yield chatbot.get_cookies(), chatbot.get_list(), history, msg
+############################### ################## #######################################
+##########################################################################################
 
 def get_reduce_token_percent(text):
     """
@@ -102,7 +131,7 @@ def predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temp
             cnt += 1
             chatbot[-1] = (i_say_show_user, f"[Local Message] {mutable[1]}waiting gpt response {cnt}/{TIMEOUT_SECONDS*2*(MAX_RETRY+1)}"+''.join(['.']*(cnt % 4)))
-            yield from update_ui(chatbot=chatbot, history=history)
+            yield from update_ui(chatbot=chatbot, history=history) # 刷新界面
             time.sleep(1)
     # 把gpt的输出从mutable中取出来
     gpt_say = mutable[0]
@@ -166,7 +195,7 @@ def CatchException(f):
             chatbot = [["插件调度异常", "异常原因"]]
             chatbot[-1] = (chatbot[-1][0], f"[Local Message] 实验性函数调用出错: \n\n{tb_str} \n\n当前代理可用性: \n\n{check_proxy(proxies)}")
-            yield from update_ui(chatbot=chatbot, history=history, msg=f'异常 {e}')
+            yield from update_ui(chatbot=chatbot, history=history, msg=f'异常 {e}') # 刷新界面
     return decorated
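toolbox.py is where the two halves of the refactor meet: ArgsGeneralWrapper packs the Gradio slider values into cookies and llm_kwargs, wraps the chat list in ChatBotWithCookies, and update_ui now yields the cookies back as the first output so that main.py's cookies = gr.State({}) stays in sync. The self-contained sketch below replays that round trip under stated assumptions: demo_plugin is a hypothetical plugin, and the simplified classes only echo the idea of the ones added above, they are not the patched implementations.

class ChatBotWithCookies(list):
    # Same idea as the class added in toolbox.py: a list of (question, answer) pairs
    # that also carries a cookie dict across the plugin call chain.
    def __init__(self, cookie):
        super().__init__()
        self._cookie = cookie

    def write_list(self, lst):
        for t in lst:
            self.append(t)

    def get_list(self):
        return [t for t in self]

    def get_cookies(self):
        return self._cookie


def update_ui(chatbot, history, msg='正常', **kwargs):
    # The new contract: never replace chatbot with a plain list, or the cookies are lost.
    assert isinstance(chatbot, ChatBotWithCookies)
    yield chatbot.get_cookies(), chatbot.get_list(), history, msg


def demo_plugin(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port):
    # Minimal plugin body using the post-refactor signature; it reads the sampling
    # parameters out of llm_kwargs and refreshes the UI once.
    chatbot.append((txt, f"echo (top_p={llm_kwargs['top_p']}, temperature={llm_kwargs['temperature']})"))
    yield from update_ui(chatbot=chatbot, history=history)


if __name__ == "__main__":
    llm_kwargs = {'top_p': 1.0, 'temperature': 1.0}
    chatbot = ChatBotWithCookies(dict(llm_kwargs))  # cookies seeded the same way ArgsGeneralWrapper does
    for cookies, chat, hist, msg in demo_plugin("hello", llm_kwargs, {}, chatbot, [], "", 8888):
        print(cookies, chat, hist, msg)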