From 24780ee628d392df501e22f0a84cc234f0ad194f Mon Sep 17 00:00:00 2001
From: qingxu fu <505030475@qq.com>
Date: Tue, 4 Apr 2023 22:56:06 +0800
Subject: [PATCH] merge

---
 main.py    | 19 +++++++++++--------
 toolbox.py | 16 ++++++++++++++--
 2 files changed, 25 insertions(+), 10 deletions(-)

diff --git a/main.py b/main.py
index 48ea670..123374e 100644
--- a/main.py
+++ b/main.py
@@ -1,7 +1,7 @@
 import os; os.environ['no_proxy'] = '*' # 避免代理网络产生意外污染
 import gradio as gr
 from predict import predict
-from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf
+from toolbox import format_io, find_free_port, on_file_uploaded, on_report_generated, get_conf, ArgsGeneralWrapper
 
 # 建议您复制一个config_private.py放自己的秘密, 如API和代理网址, 避免不小心传github被别人看到
 proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION, CHATBOT_HEIGHT = \
@@ -87,8 +87,12 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
             system_prompt = gr.Textbox(show_label=True, placeholder=f"System Prompt", label="System prompt", value=initial_prompt)
             top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.01,interactive=True, label="Top-p (nucleus sampling)",)
             temperature = gr.Slider(minimum=-0, maximum=2.0, value=1.0, step=0.01, interactive=True, label="Temperature",)
-            checkboxes = gr.CheckboxGroup(["基础功能区", "函数插件区"], value=["基础功能区", "函数插件区"], label="显示/隐藏功能区")
+            checkboxes = gr.CheckboxGroup(["基础功能区", "函数插件区", "输入区2"], value=["基础功能区", "函数插件区"], label="显示/隐藏功能区")
             gr.Markdown(description)
+    with gr.Accordion("输入区", open=True, visible=False) as input_crazy_fn:
+        with gr.Row():
+            txt2 = gr.Textbox(show_label=False, placeholder="Input question here.", label="输入区2").style(container=False)
+
     # 功能区显示开关与功能区的互动
     def fn_area_visibility(a):
         ret = {}
@@ -97,17 +101,16 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
         return ret
     checkboxes.select(fn_area_visibility, [checkboxes], [area_basic_fn, area_crazy_fn] )
     # 整理反复出现的控件句柄组合
-    input_combo = [txt, top_p, temperature, chatbot, history, system_prompt]
+    input_combo = [txt, txt2, top_p, temperature, chatbot, history, system_prompt]
     output_combo = [chatbot, history, status]
-    predict_args = dict(fn=predict, inputs=input_combo, outputs=output_combo)
-    empty_txt_args = dict(fn=lambda: "", inputs=[], outputs=[txt]) # 用于在提交后清空输入栏
+    predict_args = dict(fn=ArgsGeneralWrapper(predict), inputs=input_combo, outputs=output_combo)
     # 提交按钮、重置按钮
-    cancel_handles.append(txt.submit(**predict_args)) #; txt.submit(**empty_txt_args) 在提交后清空输入栏
-    cancel_handles.append(submitBtn.click(**predict_args)) #; submitBtn.click(**empty_txt_args) 在提交后清空输入栏
+    cancel_handles.append(txt.submit(**predict_args))
+    cancel_handles.append(submitBtn.click(**predict_args))
     resetBtn.click(lambda: ([], [], "已重置"), None, output_combo)
     # 基础功能区的回调函数注册
     for k in functional:
-        click_handle = functional[k]["Button"].click(predict, [*input_combo, gr.State(True), gr.State(k)], output_combo)
+        click_handle = functional[k]["Button"].click(fn=ArgsGeneralWrapper(predict), inputs=[*input_combo, gr.State(True), gr.State(k)], outputs=output_combo)
         cancel_handles.append(click_handle)
     # 文件上传区,接收文件后与chatbot的互动
     file_upload.upload(on_file_uploaded, [file_upload, chatbot, txt], [chatbot, txt])
diff --git a/toolbox.py b/toolbox.py
index c55a48e..00bb03e 100644
--- a/toolbox.py
+++ b/toolbox.py
@@ -2,6 +2,18 @@ import markdown, mdtex2html, threading, importlib, traceback, importlib, inspect
 from show_math import convert as convert_math
 from functools import wraps, lru_cache
 
+def ArgsGeneralWrapper(f):
+    """
+    装饰器函数,用于重组输入参数,改变输入参数的顺序与结构。
+    """
+    def decorated(txt, txt2, top_p, temperature, chatbot, history, system_prompt, *args, **kwargs):
+        txt_passon = txt
+        if txt == "" and txt2 != "": txt_passon = txt2
+        yield from f(txt_passon, top_p, temperature, chatbot, history, system_prompt, *args, **kwargs)
+
+    return decorated
+
+
 def get_reduce_token_percent(text):
     try:
         # text = "maximum context length is 4097 tokens. However, your messages resulted in 4870 tokens"
@@ -116,7 +128,7 @@ def CatchException(f):
             from toolbox import get_conf
             proxies, = get_conf('proxies')
             tb_str = '```\n' + traceback.format_exc() + '```'
-            if len(chatbot) == 0: chatbot.append(["插件调度异常","异常原因"])
+            if chatbot is None or len(chatbot) == 0: chatbot = [["插件调度异常","异常原因"]]
             chatbot[-1] = (chatbot[-1][0], f"[Local Message] 实验性函数调用出错: \n\n{tb_str} \n\n当前代理可用性: \n\n{check_proxy(proxies)}")
             yield chatbot, history, f'异常 {e}'
     return decorated
@@ -129,7 +141,7 @@ def HotReload(f):
     def decorated(*args, **kwargs):
         fn_name = f.__name__
         f_hot_reload = getattr(importlib.reload(inspect.getmodule(f)), fn_name)
-        yield from f_hot_reload(*args, **kwargs)
+        yield from ArgsGeneralWrapper(f_hot_reload)(*args, **kwargs)
     return decorated
 
 def report_execption(chatbot, history, a, b):
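
Note for reviewers: the behavioural core of this patch is the new ArgsGeneralWrapper decorator, which lets the Gradio callbacks pass both input textboxes (txt and txt2) while the wrapped predict-style generator keeps its old signature. The standalone sketch below is not part of the patch; fake_predict is a hypothetical stand-in for predict(), used only to show how txt2 is forwarded in place of txt when the main input box is empty.

def ArgsGeneralWrapper(f):
    # Reorders/reshapes the incoming arguments before calling the wrapped generator
    # (same logic as the decorator added to toolbox.py in this patch).
    def decorated(txt, txt2, top_p, temperature, chatbot, history, system_prompt, *args, **kwargs):
        txt_passon = txt
        if txt == "" and txt2 != "":
            txt_passon = txt2  # fall back to the secondary input area ("输入区2")
        yield from f(txt_passon, top_p, temperature, chatbot, history, system_prompt, *args, **kwargs)
    return decorated

def fake_predict(txt, top_p, temperature, chatbot, history, system_prompt):
    # Hypothetical stand-in for predict(); just echoes what it received.
    yield f"received: {txt}"

wrapped = ArgsGeneralWrapper(fake_predict)
# txt is empty, so the text typed into the secondary box is what reaches the wrapped function:
print(list(wrapped("", "hello from box 2", 1.0, 1.0, [], [], "You are a helpful assistant")))
# -> ['received: hello from box 2']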