diff --git a/config.py b/config.py index 1d2aa69..f25b119 100644 --- a/config.py +++ b/config.py @@ -200,7 +200,8 @@ PATH_LOGGING = "gpt_log" # 除了连接OpenAI之外,还有哪些场合允许使用代理,请勿修改 -WHEN_TO_USE_PROXY = ["Download_LLM", "Download_Gradio_Theme", "Connect_Grobid", "Warmup_Modules"] +WHEN_TO_USE_PROXY = ["Download_LLM", "Download_Gradio_Theme", "Connect_Grobid", + "Warmup_Modules", "Nougat_Download", "AutoGen"] # 自定义按钮的最大数量限制 diff --git a/crazy_functional.py b/crazy_functional.py index 1d8f5c7..795bd5f 100644 --- a/crazy_functional.py +++ b/crazy_functional.py @@ -539,6 +539,18 @@ def get_crazy_functions(): except: print('Load function plugin failed') + # try: + # from crazy_functions.多智能体 import 多智能体终端 + # function_plugins.update({ + # "多智能体终端(微软AutoGen)": { + # "Group": "智能体", + # "Color": "stop", + # "AsButton": True, + # "Function": HotReload(多智能体终端) + # } + # }) + # except: + # print('Load function plugin failed') # try: # from crazy_functions.chatglm微调工具 import 微调数据集生成 diff --git a/crazy_functions/agent_fns/auto_agent.py b/crazy_functions/agent_fns/auto_agent.py new file mode 100644 index 0000000..5e67115 --- /dev/null +++ b/crazy_functions/agent_fns/auto_agent.py @@ -0,0 +1,23 @@ +from toolbox import CatchException, update_ui, gen_time_str, trimmed_format_exc, ProxyNetworkActivate +from toolbox import report_execption, get_log_folder, update_ui_lastest_msg, Singleton +from crazy_functions.agent_fns.pipe import PluginMultiprocessManager, PipeCom +from crazy_functions.agent_fns.autogen_general import AutoGenGeneral +import time + + +class AutoGenMath(AutoGenGeneral): + + def define_agents(self): + from autogen import AssistantAgent, UserProxyAgent + return [ + { + "name": "assistant", # name of the agent. + "cls": AssistantAgent, # class of the agent. + }, + { + "name": "user_proxy", # name of the agent. + "cls": UserProxyAgent, # class of the agent. + "human_input_mode": "ALWAYS", # always ask for human input. + "llm_config": False, # disables llm-based auto reply. 
+ }, + ] \ No newline at end of file diff --git a/crazy_functions/agent_fns/autogen_general.py b/crazy_functions/agent_fns/autogen_general.py new file mode 100644 index 0000000..18c89ab --- /dev/null +++ b/crazy_functions/agent_fns/autogen_general.py @@ -0,0 +1,75 @@ +from toolbox import CatchException, update_ui, gen_time_str, trimmed_format_exc, ProxyNetworkActivate +from toolbox import report_execption, get_log_folder, update_ui_lastest_msg, Singleton +from crazy_functions.agent_fns.pipe import PluginMultiprocessManager, PipeCom +import time + + +class AutoGenGeneral(PluginMultiprocessManager): + + def gpt_academic_print_override(self, user_proxy, message, sender): + # ⭐⭐ 子进程执行 + self.child_conn.send(PipeCom("show", sender.name + '\n\n---\n\n' + message['content'])) + + def gpt_academic_get_human_input(self, user_proxy, message): + # ⭐⭐ 子进程执行 + patience = 300 + begin_waiting_time = time.time() + self.child_conn.send(PipeCom("interact", message)) + while True: + time.sleep(0.5) + if self.child_conn.poll(): + wait_success = True + break + if time.time() - begin_waiting_time > patience: + self.child_conn.send(PipeCom("done", "")) + wait_success = False + break + if wait_success: + return self.child_conn.recv().content + else: + raise TimeoutError("等待用户输入超时") + + def define_agents(self): + raise NotImplementedError + + def do_audogen(self, input): + # ⭐⭐ 子进程执行 + input = input.content + with ProxyNetworkActivate("AutoGen"): + from autogen import AssistantAgent, UserProxyAgent + config_list = [{ + 'model': self.llm_kwargs['llm_model'], + 'api_key': self.llm_kwargs['api_key'], + },] + code_execution_config={"work_dir": self.autogen_work_dir, "use_docker":True} + agents = self.define_agents() + user_proxy = None + assistant = None + for agent_kwargs in agents: + agent_cls = agent_kwargs.pop('cls') + kwargs = { + 'llm_config':{ + "config_list": config_list, + }, + 'code_execution_config':code_execution_config + } + kwargs.update(agent_kwargs) + agent_handle = agent_cls(**kwargs) + agent_handle._print_received_message = lambda a,b: self.gpt_academic_print_override(agent_kwargs, a, b) + if agent_kwargs['name'] == 'user_proxy': + agent_handle.get_human_input = lambda a: self.gpt_academic_get_human_input(user_proxy, a) + user_proxy = agent_handle + if agent_kwargs['name'] == 'assistant': assistant = agent_handle + try: + if user_proxy is None or assistant is None: raise Exception("用户代理或助理代理未定义") + user_proxy.initiate_chat(assistant, message=input) + except Exception as e: + tb_str = '```\n' + trimmed_format_exc() + '```' + self.child_conn.send(PipeCom("done", "AutoGen 执行失败: \n\n" + tb_str)) + + def subprocess_worker(self, child_conn): + # ⭐⭐ 子进程执行 + self.child_conn = child_conn + while True: + msg = self.child_conn.recv() # PipeCom + self.do_audogen(msg) diff --git a/crazy_functions/agent_fns/echo_agent.py b/crazy_functions/agent_fns/echo_agent.py new file mode 100644 index 0000000..52bf72d --- /dev/null +++ b/crazy_functions/agent_fns/echo_agent.py @@ -0,0 +1,19 @@ +from crazy_functions.agent_fns.pipe import PluginMultiprocessManager, PipeCom + +class EchoDemo(PluginMultiprocessManager): + def subprocess_worker(self, child_conn): + # ⭐⭐ 子进程 + self.child_conn = child_conn + while True: + msg = self.child_conn.recv() # PipeCom + if msg.cmd == "user_input": + # wait futher user input + self.child_conn.send(PipeCom("show", msg.content)) + wait_success = self.subprocess_worker_wait_user_feedback(wait_msg="我准备好处理下一个问题了.") + if not wait_success: + # wait timeout, terminate this subprocess_worker + break + 
elif msg.cmd == "terminate": + self.child_conn.send(PipeCom("done", "")) + break + print('[debug] subprocess_worker terminated') \ No newline at end of file diff --git a/crazy_functions/agent_fns/persistent.py b/crazy_functions/agent_fns/persistent.py new file mode 100644 index 0000000..82c869c --- /dev/null +++ b/crazy_functions/agent_fns/persistent.py @@ -0,0 +1,16 @@ +from toolbox import Singleton +@Singleton +class GradioMultiuserManagerForPersistentClasses(): + def __init__(self): + self.mapping = {} + + def already_alive(self, key): + return (key in self.mapping) and (self.mapping[key].is_alive()) + + def set(self, key, x): + self.mapping[key] = x + return self.mapping[key] + + def get(self, key): + return self.mapping[key] + diff --git a/crazy_functions/agent_fns/pipe.py b/crazy_functions/agent_fns/pipe.py new file mode 100644 index 0000000..d28c5cc --- /dev/null +++ b/crazy_functions/agent_fns/pipe.py @@ -0,0 +1,150 @@ +from toolbox import get_log_folder, update_ui, gen_time_str, trimmed_format_exc, promote_file_to_downloadzone +import time, os + +class PipeCom(): + def __init__(self, cmd, content) -> None: + self.cmd = cmd + self.content = content + + +class PluginMultiprocessManager(): + def __init__(self, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): + # ⭐ 主进程 + self.autogen_work_dir = os.path.join(get_log_folder('autogen'), gen_time_str()) + self.previous_work_dir_files = {} + self.llm_kwargs = llm_kwargs + self.plugin_kwargs = plugin_kwargs + self.chatbot = chatbot + self.history = history + self.system_prompt = system_prompt + self.web_port = web_port + self.alive = True + + def is_alive(self): + return self.alive + + def launch_subprocess_with_pipe(self): + # ⭐ 主进程 + from multiprocessing import Process, Pipe + parent_conn, child_conn = Pipe() + self.p = Process(target=self.subprocess_worker, args=(child_conn,)) + self.p.daemon = True + self.p.start() + return parent_conn + + def terminate(self): + self.p.terminate() + self.alive = False + print('[debug] instance terminated') + + def subprocess_worker(self, child_conn): + # ⭐⭐ 子进程 + raise NotImplementedError + + def send_command(self, cmd): + # ⭐ 主进程 + self.parent_conn.send(PipeCom("user_input", cmd)) + + def immediate_showoff_when_possible(self, fp): + # ⭐ 主进程 + # 获取fp的拓展名 + file_type = fp.split('.')[-1] + # 如果是文本文件, 则直接显示文本内容 + if file_type in ['png', 'jpg']: + image_path = os.path.abspath(fp) + self.chatbot.append(['检测到新生图像:', f'本地文件预览:
']) + yield from update_ui(chatbot=self.chatbot, history=self.history) + + def overwatch_workdir_file_change(self): + # ⭐ 主进程 Docker 外挂文件夹监控 + path_to_overwatch = self.autogen_work_dir + change_list = [] + # 扫描路径下的所有文件, 并与self.previous_work_dir_files中所记录的文件进行对比, + # 如果有新文件出现,或者文件的修改时间发生变化,则更新self.previous_work_dir_files中 + # 把新文件和发生变化的文件的路径记录到 change_list 中 + for root, dirs, files in os.walk(path_to_overwatch): + for file in files: + file_path = os.path.join(root, file) + if file_path not in self.previous_work_dir_files.keys(): + last_modified_time = os.stat(file_path).st_mtime + self.previous_work_dir_files.update({file_path:last_modified_time}) + change_list.append(file_path) + else: + last_modified_time = os.stat(file_path).st_mtime + if last_modified_time != self.previous_work_dir_files[file_path]: + self.previous_work_dir_files[file_path] = last_modified_time + change_list.append(file_path) + if len(change_list) > 0: + file_links = '' + for f in change_list: + res = promote_file_to_downloadzone(f) + file_links += f'
{res}' + yield from self.immediate_showoff_when_possible(file_path) + + self.chatbot.append(['检测到新生文档.', f'文档清单如下: {file_links}']) + yield from update_ui(chatbot=self.chatbot, history=self.history) + return change_list + + + def main_process_ui_control(self, txt, create_or_resume) -> str: + # ⭐ 主进程 + if create_or_resume == 'create': + self.cnt = 1 + self.parent_conn = self.launch_subprocess_with_pipe() # ⭐⭐⭐ + self.send_command(txt) + if txt == 'exit': + self.chatbot.append([f"结束", "结束信号已明确,终止AutoGen程序。"]) + yield from update_ui(chatbot=self.chatbot, history=self.history) + self.terminate() + return "terminate" + + while True: + time.sleep(0.5) + if self.parent_conn.poll(): + if '[GPT-Academic] 等待中' in self.chatbot[-1][-1]: + self.chatbot.pop(-1) # remove the last line + msg = self.parent_conn.recv() # PipeCom + if msg.cmd == "done": + self.chatbot.append([f"结束", msg.content]); self.cnt += 1 + yield from update_ui(chatbot=self.chatbot, history=self.history) + self.terminate(); break + if msg.cmd == "show": + yield from self.overwatch_workdir_file_change() + self.chatbot.append([f"运行阶段-{self.cnt}", msg.content]); self.cnt += 1 + yield from update_ui(chatbot=self.chatbot, history=self.history) + if msg.cmd == "interact": + yield from self.overwatch_workdir_file_change() + self.chatbot.append([f"程序抵达用户反馈节点.", msg.content + + "\n\n等待您的进一步指令." + + "\n\n(1) 一般情况下您不需要说什么, 清空输入区, 然后直接点击“提交”以继续. " + + "\n\n(2) 如果您需要补充些什么, 输入要反馈的内容, 直接点击“提交”以继续. " + + "\n\n(3) 如果您想终止程序, 输入exit, 直接点击“提交”以终止AutoGen并解锁. " + ]) + yield from update_ui(chatbot=self.chatbot, history=self.history) + # do not terminate here, leave the subprocess_worker instance alive + return "wait_feedback" + else: + if '[GPT-Academic] 等待中' not in self.chatbot[-1][-1]: + self.chatbot.append(["[GPT-Academic] 等待AutoGen执行结果 ...", "[GPT-Academic] 等待中"]) + self.chatbot[-1] = [self.chatbot[-1][0], self.chatbot[-1][1].replace("[GPT-Academic] 等待中", "[GPT-Academic] 等待中.")] + yield from update_ui(chatbot=self.chatbot, history=self.history) + + self.terminate() + return "terminate" + + def subprocess_worker_wait_user_feedback(self, wait_msg="wait user feedback"): + # ⭐⭐ 子进程 + patience = 5 * 60 + begin_waiting_time = time.time() + self.child_conn.send(PipeCom("interact", wait_msg)) + while True: + time.sleep(0.5) + if self.child_conn.poll(): + wait_success = True + break + if time.time() - begin_waiting_time > patience: + self.child_conn.send(PipeCom("done", "")) + wait_success = False + break + return wait_success + diff --git a/crazy_functions/crazy_utils.py b/crazy_functions/crazy_utils.py index 3f2ee76..8533d08 100644 --- a/crazy_functions/crazy_utils.py +++ b/crazy_functions/crazy_utils.py @@ -721,8 +721,10 @@ class nougat_interface(): def nougat_with_timeout(self, command, cwd, timeout=3600): import subprocess + from toolbox import ProxyNetworkActivate logging.info(f'正在执行命令 {command}') - process = subprocess.Popen(command, shell=True, cwd=cwd) + with ProxyNetworkActivate("Nougat_Download"): + process = subprocess.Popen(command, shell=True, cwd=cwd, env=os.environ) try: stdout, stderr = process.communicate(timeout=timeout) except subprocess.TimeoutExpired: @@ -767,54 +769,6 @@ def try_install_deps(deps, reload_m=[]): importlib.reload(__import__(m)) -HTML_CSS = """ -.row { - display: flex; - flex-wrap: wrap; -} -.column { - flex: 1; - padding: 10px; -} -.table-header { - font-weight: bold; - border-bottom: 1px solid black; -} -.table-row { - border-bottom: 1px solid lightgray; -} -.table-cell { - padding: 5px; -} -""" - -TABLE_CSS = """ -
<div class="row table-row">
-    <div class="column table-cell">REPLACE_A</div>
-    <div class="column table-cell">REPLACE_B</div>
-</div>
-""" - -class construct_html(): - def __init__(self) -> None: - self.css = HTML_CSS - self.html_string = f'翻译结果' - - - def add_row(self, a, b): - tmp = TABLE_CSS - from toolbox import markdown_convertion - tmp = tmp.replace('REPLACE_A', markdown_convertion(a)) - tmp = tmp.replace('REPLACE_B', markdown_convertion(b)) - self.html_string += tmp - - - def save_file(self, file_name): - with open(os.path.join(get_log_folder(), file_name), 'w', encoding='utf8') as f: - f.write(self.html_string.encode('utf-8', 'ignore').decode()) - return os.path.join(get_log_folder(), file_name) - - def get_plugin_arg(plugin_kwargs, key, default): # 如果参数是空的 if (key in plugin_kwargs) and (plugin_kwargs[key] == ""): plugin_kwargs.pop(key) diff --git a/crazy_functions/latex_fns/latex_actions.py b/crazy_functions/latex_fns/latex_actions.py index dcde0e9..7e561df 100644 --- a/crazy_functions/latex_fns/latex_actions.py +++ b/crazy_functions/latex_fns/latex_actions.py @@ -423,7 +423,7 @@ def write_html(sp_file_contents, sp_file_result, chatbot, project_folder): # write html try: import shutil - from ..crazy_utils import construct_html + from crazy_functions.pdf_fns.report_gen_html import construct_html from toolbox import gen_time_str ch = construct_html() orig = "" diff --git a/crazy_functions/latex_fns/latex_toolbox.py b/crazy_functions/latex_fns/latex_toolbox.py index e5484ca..330cb65 100644 --- a/crazy_functions/latex_fns/latex_toolbox.py +++ b/crazy_functions/latex_fns/latex_toolbox.py @@ -308,7 +308,10 @@ def merge_tex_files_(project_foler, main_file, mode): fp = os.path.join(project_foler, f) fp_ = find_tex_file_ignore_case(fp) if fp_: - with open(fp_, 'r', encoding='utf-8', errors='replace') as fx: c = fx.read() + try: + with open(fp_, 'r', encoding='utf-8', errors='replace') as fx: c = fx.read() + except: + c = f"\n\nWarning from GPT-Academic: LaTex source file is missing!\n\n" else: raise RuntimeError(f'找不到{fp},Tex源文件缺失!') c = merge_tex_files_(project_foler, c, mode) @@ -366,6 +369,14 @@ def insert_abstract(tex_content): # insert "abs_str" on the next line modified_tex = tex_content[:end_line_index+1] + '\n\n' + insert_missing_abs_str + '\n\n' + tex_content[end_line_index+1:] return modified_tex + elif r"\begin{document}" in tex_content: + # find the position of "\maketitle" + find_index = tex_content.index(r"\begin{document}") + # find the nearest ending line + end_line_index = tex_content.find("\n", find_index) + # insert "abs_str" on the next line + modified_tex = tex_content[:end_line_index+1] + '\n\n' + insert_missing_abs_str + '\n\n' + tex_content[end_line_index+1:] + return modified_tex else: return tex_content diff --git a/crazy_functions/pdf_fns/parse_pdf.py b/crazy_functions/pdf_fns/parse_pdf.py index a047efc..9853fd5 100644 --- a/crazy_functions/pdf_fns/parse_pdf.py +++ b/crazy_functions/pdf_fns/parse_pdf.py @@ -73,7 +73,7 @@ def produce_report_markdown(gpt_response_collection, meta, paper_meta_info, chat return res_path def translate_pdf(article_dict, llm_kwargs, chatbot, fp, generated_conclusion_files, TOKEN_LIMIT_PER_FRAGMENT, DST_LANG): - from crazy_functions.crazy_utils import construct_html + from crazy_functions.pdf_fns.report_gen_html import construct_html from crazy_functions.crazy_utils import breakdown_txt_to_satisfy_token_limit_for_pdf from crazy_functions.crazy_utils import request_gpt_model_in_new_thread_with_ui_alive from crazy_functions.crazy_utils import request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency diff --git a/crazy_functions/pdf_fns/report_gen_html.py 
b/crazy_functions/pdf_fns/report_gen_html.py new file mode 100644 index 0000000..2182921 --- /dev/null +++ b/crazy_functions/pdf_fns/report_gen_html.py @@ -0,0 +1,58 @@ +from toolbox import update_ui, get_conf, trimmed_format_exc, get_log_folder +import os + + + + +class construct_html(): + def __init__(self) -> None: + self.html_string = "" + + def add_row(self, a, b): + from toolbox import markdown_convertion + template = """ + { + primary_col: { + header: String.raw`__PRIMARY_HEADER__`, + msg: String.raw`__PRIMARY_MSG__`, + }, + secondary_rol: { + header: String.raw`__SECONDARY_HEADER__`, + msg: String.raw`__SECONDARY_MSG__`, + } + }, + """ + def std(str): + str = str.replace(r'`',r'`') + if str.endswith("\\"): str += ' ' + if str.endswith("}"): str += ' ' + if str.endswith("$"): str += ' ' + return str + + template_ = template + a_lines = a.split('\n') + b_lines = b.split('\n') + + if len(a_lines) == 1 or len(a_lines[0]) > 50: + template_ = template_.replace("__PRIMARY_HEADER__", std(a[:20])) + template_ = template_.replace("__PRIMARY_MSG__", std(markdown_convertion(a))) + else: + template_ = template_.replace("__PRIMARY_HEADER__", std(a_lines[0])) + template_ = template_.replace("__PRIMARY_MSG__", std(markdown_convertion('\n'.join(a_lines[1:])))) + + if len(b_lines) == 1 or len(b_lines[0]) > 50: + template_ = template_.replace("__SECONDARY_HEADER__", std(b[:20])) + template_ = template_.replace("__SECONDARY_MSG__", std(markdown_convertion(b))) + else: + template_ = template_.replace("__SECONDARY_HEADER__", std(b_lines[0])) + template_ = template_.replace("__SECONDARY_MSG__", std(markdown_convertion('\n'.join(b_lines[1:])))) + self.html_string += template_ + + def save_file(self, file_name): + from toolbox import get_log_folder + with open('crazy_functions/pdf_fns/report_template.html', 'r', encoding='utf8') as f: + html_template = f.read() + html_template = html_template.replace("__TF_ARR__", self.html_string) + with open(os.path.join(get_log_folder(), file_name), 'w', encoding='utf8') as f: + f.write(html_template.encode('utf-8', 'ignore').decode()) + return os.path.join(get_log_folder(), file_name) diff --git a/crazy_functions/pdf_fns/report_template.html b/crazy_functions/pdf_fns/report_template.html new file mode 100644 index 0000000..39a1e7c --- /dev/null +++ b/crazy_functions/pdf_fns/report_template.html @@ -0,0 +1,104 @@ + + + + + + __TITLE__ + + + + + +
+    [template body garbled in this view: the recoverable pieces are the __TITLE__ placeholder above, a "文章目录" (table of contents) section, and the __TF_ARR__ placeholder that report_gen_html.py fills with the generated rows]
+ + + diff --git a/crazy_functions/多智能体.py b/crazy_functions/多智能体.py new file mode 100644 index 0000000..35c0cf5 --- /dev/null +++ b/crazy_functions/多智能体.py @@ -0,0 +1,96 @@ +# 本源代码中, ⭐ = 关键步骤 +""" +测试: + - 裁剪图像,保留下半部分 + - 交换图像的蓝色通道和红色通道 + - 将图像转为灰度图像 + - 将csv文件转excel表格 + +Testing: + - Crop the image, keeping the bottom half. + - Swap the blue channel and red channel of the image. + - Convert the image to grayscale. + - Convert the CSV file to an Excel spreadsheet. +""" + + +from toolbox import CatchException, update_ui, gen_time_str, trimmed_format_exc, ProxyNetworkActivate +from toolbox import get_conf, select_api_key, update_ui_lastest_msg, Singleton +from crazy_functions.crazy_utils import request_gpt_model_in_new_thread_with_ui_alive, get_plugin_arg +from crazy_functions.crazy_utils import input_clipping, try_install_deps +from crazy_functions.agent_fns.persistent import GradioMultiuserManagerForPersistentClasses +from crazy_functions.agent_fns.auto_agent import AutoGenMath +import time + + +@CatchException +def 多智能体终端(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port): + """ + txt 输入栏用户输入的文本,例如需要翻译的一段话,再例如一个包含了待处理文件的路径 + llm_kwargs gpt模型参数,如温度和top_p等,一般原样传递下去就行 + plugin_kwargs 插件模型的参数 + chatbot 聊天显示框的句柄,用于显示给用户 + history 聊天历史,前情提要 + system_prompt 给gpt的静默提醒 + web_port 当前软件运行的端口号 + """ + # 检查当前的模型是否符合要求 + supported_llms = ['gpt-3.5-turbo-16k', 'gpt-4', 'gpt-4-32k'] + llm_kwargs['api_key'] = select_api_key(llm_kwargs['api_key'], llm_kwargs['llm_model']) + if llm_kwargs['llm_model'] not in supported_llms: + chatbot.append([f"处理任务: {txt}", f"当前插件只支持{str(supported_llms)}, 当前模型{llm_kwargs['llm_model']}."]) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + # 检查当前的模型是否符合要求 + API_URL_REDIRECT, = get_conf('API_URL_REDIRECT') + if len(API_URL_REDIRECT) > 0: + chatbot.append([f"处理任务: {txt}", f"暂不支持中转."]) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + # 尝试导入依赖,如果缺少依赖,则给出安装建议 + try: + import autogen, docker + except: + chatbot.append([ f"处理任务: {txt}", + f"导入软件依赖失败。使用该模块需要额外依赖,安装方法```pip install --upgrade pyautogen docker```。"]) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + # 尝试导入依赖,如果缺少依赖,则给出安装建议 + try: + import autogen + import glob, os, time, subprocess + subprocess.Popen(['docker', '--version']) + except: + chatbot.append([f"处理任务: {txt}", f"缺少docker运行环境!"]) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + return + + # 解锁插件 + chatbot.get_cookies()['lock_plugin'] = None + persistent_class_multi_user_manager = GradioMultiuserManagerForPersistentClasses() + user_uuid = chatbot.get_cookies().get('uuid') + persistent_key = f"{user_uuid}->多智能体终端" + if persistent_class_multi_user_manager.already_alive(persistent_key): + # 当已经存在一个正在运行的多智能体终端时,直接将用户输入传递给它,而不是再次启动一个新的多智能体终端 + print('[debug] feed new user input') + executor = persistent_class_multi_user_manager.get(persistent_key) + exit_reason = yield from executor.main_process_ui_control(txt, create_or_resume="resume") + else: + # 运行多智能体终端 (首次) + print('[debug] create new executor instance') + history = [] + chatbot.append(["正在启动: 多智能体终端", "插件动态生成, 执行开始, 作者 Microsoft & Binary-Husky."]) + yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 + executor = AutoGenMath(llm_kwargs, plugin_kwargs, chatbot, history, system_prompt, web_port) + persistent_class_multi_user_manager.set(persistent_key, executor) + exit_reason = yield from executor.main_process_ui_control(txt, create_or_resume="create") + + if exit_reason == 
"wait_feedback": + # 当用户点击了“等待反馈”按钮时,将executor存储到cookie中,等待用户的再次调用 + executor.chatbot.get_cookies()['lock_plugin'] = 'crazy_functions.多智能体->多智能体终端' + else: + executor.chatbot.get_cookies()['lock_plugin'] = None + yield from update_ui(chatbot=executor.chatbot, history=executor.history) # 更新状态 diff --git a/crazy_functions/批量翻译PDF文档_NOUGAT.py b/crazy_functions/批量翻译PDF文档_NOUGAT.py index ff3a862..3e50c93 100644 --- a/crazy_functions/批量翻译PDF文档_NOUGAT.py +++ b/crazy_functions/批量翻译PDF文档_NOUGAT.py @@ -97,7 +97,8 @@ def 解析PDF_基于NOUGAT(file_manifest, project_folder, llm_kwargs, plugin_kwa generated_conclusion_files = [] generated_html_files = [] DST_LANG = "中文" - from crazy_functions.crazy_utils import nougat_interface, construct_html + from crazy_functions.crazy_utils import nougat_interface + from crazy_functions.pdf_fns.report_gen_html import construct_html nougat_handle = nougat_interface() for index, fp in enumerate(file_manifest): chatbot.append(["当前进度:", f"正在解析论文,请稍候。(第一次运行时,需要花费较长时间下载NOUGAT参数)"]); yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 diff --git a/crazy_functions/批量翻译PDF文档_多线程.py b/crazy_functions/批量翻译PDF文档_多线程.py index 8ada6e0..79c4a26 100644 --- a/crazy_functions/批量翻译PDF文档_多线程.py +++ b/crazy_functions/批量翻译PDF文档_多线程.py @@ -63,7 +63,7 @@ def 解析PDF_基于GROBID(file_manifest, project_folder, llm_kwargs, plugin_kwa generated_conclusion_files = [] generated_html_files = [] DST_LANG = "中文" - from crazy_functions.crazy_utils import construct_html + from crazy_functions.pdf_fns.report_gen_html import construct_html for index, fp in enumerate(file_manifest): chatbot.append(["当前进度:", f"正在连接GROBID服务,请稍候: {grobid_url}\n如果等待时间过长,请修改config中的GROBID_URL,可修改成本地GROBID服务。"]); yield from update_ui(chatbot=chatbot, history=history) # 刷新界面 article_dict = parse_pdf(fp, grobid_url) @@ -86,7 +86,7 @@ def 解析PDF(file_manifest, project_folder, llm_kwargs, plugin_kwargs, chatbot, TOKEN_LIMIT_PER_FRAGMENT = 1024 generated_conclusion_files = [] generated_html_files = [] - from crazy_functions.crazy_utils import construct_html + from crazy_functions.pdf_fns.report_gen_html import construct_html for index, fp in enumerate(file_manifest): # 读取PDF文件 file_content, page_one = read_and_clean_pdf_text(fp) diff --git a/docs/translate_english.json b/docs/translate_english.json index f3ec0c4..c13ac81 100644 --- a/docs/translate_english.json +++ b/docs/translate_english.json @@ -2649,5 +2649,144 @@ "使用zip压缩格式": "Using zip compression format", "受到google限制": "Restricted by Google", "如果是": "If it is", - "不用担心": "don't worry" + "不用担心": "don't worry", + "显示/隐藏自定义菜单": "Show/Hide Custom Menu", + "1. 输入文本": "1. 
Enter Text", + "微软AutoGen": "Microsoft AutoGen", + "在没有声音之后": "After No Sound", + "⭐ 主进程 Docker 外挂文件夹监控": "⭐ Main Process Docker External Folder Monitoring", + "请求任务": "Request Task", + "推荐上传压缩文件": "Recommend Uploading Compressed File", + "我准备好处理下一个问题了": "I'm ready to handle the next question", + "输入要反馈的内容": "Enter the content to be feedbacked", + "当已经存在一个正在运行的MultiAgentTerminal时": "When there is already a running MultiAgentTerminal", + "也根据时间间隔": "Also according to the time interval", + "自定义功能": "Custom Function", + "上传文件后会自动把输入区修改为相应路径": "After uploading the file, the input area will be automatically modified to the corresponding path", + "缺少docker运行环境!": "Missing docker runtime environment!", + "暂不支持中转": "Transit is not supported temporarily", + "一些第三方接口的出现这样的错误": "Some third-party interfaces encounter such errors", + "项目Wiki": "Project Wiki", + "但是我们把上一帧同样加上": "But we also add the previous frame", + "AutoGen 执行失败": "AutoGen execution failed", + "程序抵达用户反馈节点": "The program reaches the user feedback node", + "预制功能": "Prefabricated Function", + "输入新按钮名称": "Enter the new button name", + "| 不需要输入参数": "| No input parameters required", + "如果有新文件出现": "If there is a new file", + "Bug反馈": "Bug Feedback", + "指定翻译成何种语言": "Specify the language to translate into", + "点击保存当前的对话按钮": "Click the save current conversation button", + "如果您需要补充些什么": "If you need to add something", + "HTTPS 秘钥和证书": "HTTPS Key and Certificate", + "输入exit": "Enter exit", + "输入新提示后缀": "Enter a new prompt suffix", + "如果是文本文件": "If it is a text file", + "支持动态切换主题": "Support dynamic theme switching", + "并与self.previous_work_dir_files中所记录的文件进行对比": "And compare with the files recorded in self.previous_work_dir_files", + "作者 Microsoft & Binary-Husky": "Author Microsoft & Binary-Husky", + "请在自定义菜单中定义提示词前缀": "Please define the prefix of the prompt word in the custom menu", + "一般情况下您不需要说什么": "In general, you don't need to say anything", + "「暗色主题已启用": "Dark theme enabled", + "继续向服务器发送n次音频数据": "Continue to send audio data to the server n times", + "获取fp的拓展名": "Get the extension name of fp", + "指令安装内置Gradio及其他依赖": "Command to install built-in Gradio and other dependencies", + "查看自动更新": "Check for automatic updates", + "则更新self.previous_work_dir_files中": "Then update in self.previous_work_dir_files", + "看门狗耐心": "Watchdog patience", + "检测到新生图像": "Detected new image", + "等待AutoGen执行结果": "Waiting for AutoGen execution result", + "自定义菜单": "Custom menu", + "保持链接激活": "Keep the link active", + "已经被新插件取代": "Has been replaced by a new plugin", + "检查当前的模型是否符合要求": "Check if the current model meets the requirements", + "交互功能模板Demo函数": "Interactive function template Demo function", + "上一帧没有人声": "No human voice in the previous frame", + "用于判断异常": "Used to judge exceptions", + "请阅读Wiki": "Please read the Wiki", + "查找wallhaven.cc的壁纸": "Search for wallpapers on wallhaven.cc", + "2. 点击任意基础功能区按钮": "2. Click any button in the basic function area", + "一些垃圾第三方接口的出现这样的错误": "Some errors caused by garbage third-party interfaces", + "再次点击VoidTerminal": "Click VoidTerminal again", + "结束信号已明确": "The end signal is clear", + "获取代理失败 无代理状态下很可能无法访问OpenAI家族的模型及谷歌学术 建议": "Failed to get proxy. It is very likely that you will not be able to access OpenAI family models and Google Scholar without a proxy. It is recommended", + "界面外观": "Interface appearance", + "如果您想终止程序": "If you want to terminate the program", + "2. 
点击任意函数插件区按钮": "Click any function plugin area button", + "绕过openai访问频率限制": "Bypass openai access frequency limit", + "配置暗色主题或亮色主题": "Configure dark theme or light theme", + "自定义按钮的最大数量限制": "Maximum number limit for custom buttons", + "函数插件区使用说明": "Instructions for function plugin area", + "如何语音对话": "How to have a voice conversation", + "清空输入区": "Clear input area", + "文档清单如下": "The document list is as follows", + "由 audio_convertion_thread": "By audio_convertion_thread", + "音频的可视化表现": "Visual representation of audio", + "然后直接点击“提交”以继续": "Then click 'Submit' to continue", + "运行MultiAgentTerminal": "Run MultiAgentTerminal", + "自定义按钮1": "Custom button 1", + "查看历史上的今天事件": "View events from history", + "如遇到Bug请前往": "If you encounter a bug, please go to", + "当前插件只支持": "The current plugin only supports", + "而不是再次启动一个新的MultiAgentTerminal": "Instead of starting a new MultiAgentTerminal again", + "用户代理或助理代理未定义": "User agent or assistant agent is not defined", + "运行阶段-": "Running phase-", + "随机选择": "Random selection", + "直接点击“提交”以继续": "Click 'Submit' to continue", + "使用项目内置Gradio获取最优体验! 请运行": "Use the built-in Gradio for the best experience! Please run", + "直接点击“提交”以终止AutoGen并解锁": "Click 'Submit' to terminate AutoGen and unlock", + "Github源代码开源和更新": "Github source code is open source and updated", + "直接将用户输入传递给它": "Pass user input directly to it", + "这是一个面向开发者的插件Demo": "This is a plugin demo for developers", + "帮助": "Help", + "普通对话使用说明": "Instructions for normal conversation", + "自定义按钮": "Custom button", + "即使没有声音": "Even without sound", + "⭐ 主进程": "⭐ Main process", + "基础功能区使用说明": "Basic Function Area Usage Instructions", + "提前读取一些信息": "Read some information in advance", + "当用户点击了“等待反馈”按钮时": "When the user clicks the 'Wait for Feedback' button", + "选择一个需要自定义基础功能区按钮": "Select a button in the Basic Function Area that needs to be customized", + "VoidTerminal使用说明": "VoidTerminal Usage Instructions", + "兼容一下吧": "Let's make it compatible", + "⭐⭐ 子进程执行": "⭐⭐ Subprocess execution", + "首次": "For the first time", + "则直接显示文本内容": "Then display the text content directly", + "更新状态": "Update status", + "2. 点击提交": "2. Click Submit", + "⭐⭐ 子进程": "⭐⭐ Subprocess", + "输入新提示前缀": "Enter a new prompt prefix", + "等待用户输入超时": "Wait for user input timeout", + "把新文件和发生变化的文件的路径记录到 change_list 中": "Record the paths of new files and files that have changed in change_list", + "或者上传文件": "Or upload a file", + "或者文件的修改时间发生变化": "Or the modification time of the file has changed", + "1. 输入路径/问题": "1. Enter path/question", + "尝试直接连接": "Try to connect directly", + "未来将删除": "Will be deleted in the future", + "请在自定义菜单中定义提示词后缀": "Please define the suffix of the prompt word in the custom menu", + "将executor存储到cookie中": "Store the executor in the cookie", + "1. 输入问题": "1. 
Enter question", + "发送一些音频片段给服务器": "Send some audio clips to the server", + "点击VoidTerminal": "Click VoidTerminal", + "扫描路径下的所有文件": "Scan all files under the path", + "检测到新生文档": "Detect new documents", + "预热tiktoken模块": "Preheat the tiktoken module", + "等待您的进一步指令": "Waiting for your further instructions", + "实时语音对话": "Real-time voice conversation", + "确认并保存": "Confirm and save", + "「亮色主题已启用": "Light theme enabled", + "终止AutoGen程序": "Terminate AutoGen program", + "然后根据提示输入指令": "Then enter the command as prompted", + "请上传本地文件/压缩包供“函数插件区”功能调用": "Please upload local files/zip packages for 'Function Plugin Area' function call", + "上传文件": "Upload file", + "上一帧是否有人说话": "Was there anyone speaking in the previous frame", + "这是一个时刻聆听着的语音对话助手 | 没有输入参数": "This is a voice conversation assistant that is always listening | No input parameters", + "常见问题请查阅": "Please refer to the FAQ for common questions", + "更换模型 & Prompt": "Change model & Prompt", + "如何保存对话": "How to save the conversation", + "处理任务": "Process task", + "加载已保存": "Load saved", + "打开浏览器页面": "Open browser page", + "解锁插件": "Unlock plugin", + "如果话筒激活 / 如果处于回声收尾阶段": "If the microphone is active / If it is in the echo tail stage" } \ No newline at end of file diff --git a/docs/translate_std.json b/docs/translate_std.json index 827dcdb..90eb685 100644 --- a/docs/translate_std.json +++ b/docs/translate_std.json @@ -92,5 +92,7 @@ "批量翻译PDF文档_NOUGAT": "BatchTranslatePDFDocuments_NOUGAT", "解析PDF_基于NOUGAT": "ParsePDF_NOUGAT", "解析一个Matlab项目": "AnalyzeAMatlabProject", - "函数动态生成": "DynamicFunctionGeneration" + "函数动态生成": "DynamicFunctionGeneration", + "多智能体终端": "MultiAgentTerminal", + "多智能体": "MultiAgent" } \ No newline at end of file diff --git a/main.py b/main.py index e7fa3c5..9f38995 100644 --- a/main.py +++ b/main.py @@ -431,7 +431,7 @@ def main(): ssl_certfile=None if SSL_CERTFILE == "" else SSL_CERTFILE, ssl_verify=False, server_port=PORT, - favicon_path="docs/logo.png", + favicon_path=os.path.join(os.path.dirname(__file__), "docs/logo.png"), auth=AUTHENTICATION if len(AUTHENTICATION) != 0 else None, blocked_paths=["config.py","config_private.py","docker-compose.yml","Dockerfile"]) diff --git a/themes/default.py b/themes/default.py index da1f187..6fa2ba5 100644 --- a/themes/default.py +++ b/themes/default.py @@ -1,7 +1,8 @@ +import os import gradio as gr from toolbox import get_conf CODE_HIGHLIGHT, ADD_WAIFU, LAYOUT = get_conf('CODE_HIGHLIGHT', 'ADD_WAIFU', 'LAYOUT') - +theme_dir = os.path.dirname(__file__) def adjust_theme(): try: @@ -57,7 +58,7 @@ def adjust_theme(): button_cancel_text_color_dark="white", ) - with open('themes/common.js', 'r', encoding='utf8') as f: + with open(os.path.join(theme_dir, 'common.js'), 'r', encoding='utf8') as f: js = f"" # 添加一个萌萌的看板娘 @@ -79,7 +80,7 @@ def adjust_theme(): print('gradio版本较旧, 不能自定义字体和颜色') return set_theme -with open("themes/default.css", "r", encoding="utf-8") as f: +with open(os.path.join(theme_dir, 'default.css'), "r", encoding="utf-8") as f: advanced_css = f.read() -with open("themes/common.css", "r", encoding="utf-8") as f: +with open(os.path.join(theme_dir, 'common.css'), "r", encoding="utf-8") as f: advanced_css += f.read() diff --git a/version b/version index d5c2012..1470eb4 100644 --- a/version +++ b/version @@ -1,5 +1,5 @@ { - "version": 3.55, + "version": 3.56, "show_feature": true, - "new_feature": "重新编译Gradio优化使用体验 <-> 新增动态代码解释器(CodeInterpreter) <-> 增加文本回答复制按钮 <-> 细分代理场合 <-> 支持动态选择不同界面主题 <-> 提高稳定性&解决多用户冲突问题 <-> 支持插件分类和更多UI皮肤外观 <-> 支持用户使用自然语言调度各个插件(虚空终端) ! 
<-> 改进UI,设计新主题 <-> 支持借助GROBID实现PDF高精度翻译 <-> 接入百度千帆平台和文心一言 <-> 接入阿里通义千问、讯飞星火、上海AI-Lab书生 <-> 优化一键升级 <-> 提高arxiv翻译速度和成功率" + "new_feature": "支持动态追加基础功能按钮 <-> 新汇报PDF汇总页面 <-> 重新编译Gradio优化使用体验 <-> 新增动态代码解释器(CodeInterpreter) <-> 增加文本回答复制按钮 <-> 细分代理场合 <-> 支持动态选择不同界面主题 <-> 提高稳定性&解决多用户冲突问题 <-> 支持插件分类和更多UI皮肤外观 <-> 支持用户使用自然语言调度各个插件(虚空终端) ! <-> 改进UI,设计新主题 <-> 支持借助GROBID实现PDF高精度翻译 <-> 接入百度千帆平台和文心一言 <-> 接入阿里通义千问、讯飞星火、上海AI-Lab书生 <-> 优化一键升级 <-> 提高arxiv翻译速度和成功率" }
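Note on the new agent_fns/pipe.py: the main process and the subprocess worker communicate only through PipeCom messages over a multiprocessing Pipe. The command set used in this patch is summarized in the comment-only sketch below, derived from main_process_ui_control and the worker implementations:

    # PipeCom.cmd values exchanged over the Pipe:
    #   main process -> worker : "user_input"   new instruction forwarded by send_command()
    #   worker -> main process : "show"         render an intermediate message in the chatbot
    #                            "interact"     pause and wait for user feedback (or time out after `patience`)
    #                            "done"         final message; the main process then terminates the worker
    # EchoDemo additionally handles a "terminate" command, although the manager
    # currently stops workers via Process.terminate() rather than sending it.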
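Note on agent_fns/autogen_general.py: the agents assembled at runtime by do_audogen() from define_agents() correspond roughly to the plain pyautogen setup sketched below. The model name, api_key and message are illustrative placeholders (the real values come from llm_kwargs and user input), and the real work_dir is get_log_folder('autogen')/gen_time_str():

    from autogen import AssistantAgent, UserProxyAgent

    config_list = [{"model": "gpt-4", "api_key": "sk-..."}]  # built from llm_kwargs at runtime
    assistant = AssistantAgent(
        name="assistant",
        llm_config={"config_list": config_list},
    )
    user_proxy = UserProxyAgent(
        name="user_proxy",
        human_input_mode="ALWAYS",     # always route replies through the human
        llm_config=False,              # no LLM-based auto reply for the proxy
        code_execution_config={"work_dir": "gpt_log/autogen", "use_docker": True},
    )
    # The plugin overrides _print_received_message and get_human_input so this
    # exchange is rendered in, and driven from, the Gradio chatbot instead of stdin.
    user_proxy.initiate_chat(assistant, message="将csv文件转excel表格")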
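Note on pdf_fns/report_gen_html.py: the new construct_html keeps the add_row/save_file interface of the class removed from crazy_utils.py, so the PDF and LaTeX call sites above only change their import. A minimal usage sketch (the strings and the file name are illustrative):

    from crazy_functions.pdf_fns.report_gen_html import construct_html

    ch = construct_html()
    # each row becomes one original/translated entry in the __TF_ARR__ array of report_template.html
    ch.add_row("Original paragraph ...", "Translated paragraph ...")
    report_path = ch.save_file("translate_report.html")  # written under get_log_folder()
    print(report_path)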