Add support for Golang, Java, and other project types

commit 3387b5acb0
parent 9540cf9448
12 README.md
@@ -36,16 +36,14 @@ https://github.com/polarwinkel/mdtex2html

Custom hotkeys | Supports user-defined shortcut keys
Proxy configuration | Supports configuring a proxy server
Modular design | Supports custom, higher-order experimental features
Self-analysis | [Function plugin] One-click walkthrough of this project's own source code
Project analysis | [Function plugin] One-click analysis of other Python/C++ and similar projects
Paper reading | [Function plugin] One-click interpretation of a full LaTeX paper with a generated abstract
arxiv assistant | [Function plugin] Enter a URL to translate the abstract and download the paper in one click
Batch comment generation | [Function plugin] One-click batch generation of function comments
Chat analysis report | [Function plugin] Automatically generates a summary report after running
Self-analysis | [Experimental] One-click walkthrough of this project's own source code
Project analysis | [Experimental] One-click analysis of other Python/C++ projects
Paper reading | [Experimental] One-click interpretation of a full LaTeX paper with a generated abstract
Batch comment generation | [Experimental] One-click batch generation of function comments
Chat analysis report | [Experimental] Automatically generates a summary report after running
Formula display | Shows both the TeX source and the rendered form of formulas
Image display | Images can be shown in markdown
Markdown tables in GPT output | Renders markdown tables produced by GPT
Local LLM interface | Connects local language models such as galactica via [TGUI](https://github.com/oobabooga/text-generation-webui)
…… | ……

</div>
crazy_functions/下载arxiv论文翻译摘要.py

@@ -134,8 +134,7 @@ def get_name(_url_):

@CatchException
def 下载arxiv论文并翻译摘要(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):

    CRAZY_FUNCTION_INFO = "下载arxiv论文并翻译摘要,作者 binary-husky。正在提取摘要并下载PDF文档……"
    raise RuntimeError()
    CRAZY_FUNCTION_INFO = "下载arxiv论文并翻译摘要,函数插件作者[binary-husky]。正在提取摘要并下载PDF文档……"
    import glob
    import os

@@ -180,8 +179,8 @@ def 下载arxiv论文并翻译摘要(txt, top_p, temperature, chatbot, history,
    # 写入文件
    import shutil
    # 重置文件的创建时间
    shutil.copyfile(pdf_path, pdf_path.replace('.pdf', '.autodownload.pdf')); os.remove(pdf_path)
    shutil.copyfile(pdf_path, f'./gpt_log/{os.path.basename(pdf_path)}'); os.remove(pdf_path)
    res = write_results_to_file(history)
    chatbot.append(("完成了吗?", res))
    chatbot.append(("完成了吗?", res + "\n\nPDF文件也已经下载"))
    yield chatbot, history, msg
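The paired shutil.copyfile(...); os.remove(...) lines above effectively move the downloaded PDF: one variant renames it in place, the other relocates it into ./gpt_log/. A minimal equivalent using shutil.move, with a hypothetical path purely for illustration, would be:

    import os, shutil

    pdf_path = './2303.00001.pdf'                    # hypothetical downloaded file, illustration only
    os.makedirs('./gpt_log', exist_ok=True)          # make sure the target folder exists
    shutil.move(pdf_path, f'./gpt_log/{os.path.basename(pdf_path)}')  # copy + remove collapsed into one call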
crazy_functions/解析项目源代码.py

@@ -1,9 +1,7 @@
from predict import predict_no_ui
from toolbox import CatchException, report_execption, write_results_to_file, predict_no_ui_but_counting_down

fast_debug = False


def 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt):
    import time, glob, os
    print('begin analysis on:', file_manifest)
@@ -11,22 +9,20 @@ def 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot,
        with open(fp, 'r', encoding='utf-8') as f:
            file_content = f.read()

        prefix = "接下来请你逐文件分析下面的工程" if index == 0 else ""
        prefix = "接下来请你逐文件分析下面的工程" if index==0 else ""
        i_say = prefix + f'请对下面的程序文件做一个概述文件名是{os.path.relpath(fp, project_folder)},文件代码是 ```{file_content}```'
        i_say_show_user = prefix + f'[{index}/{len(file_manifest)}] 请对下面的程序文件做一个概述: {os.path.abspath(fp)}'
        chatbot.append((i_say_show_user, "[Local Message] waiting gpt response."))
        yield chatbot, history, '正常'

        if not fast_debug:
        if not fast_debug:
            msg = '正常'

            # ** gpt request **
            gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature,
                                                                 history=[]) # 带超时倒计时
            gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature, history=[]) # 带超时倒计时

            chatbot[-1] = (i_say_show_user, gpt_say)
            history.append(i_say_show_user);
            history.append(gpt_say)
            history.append(i_say_show_user); history.append(gpt_say)
            yield chatbot, history, msg
            if not fast_debug: time.sleep(2)
@@ -35,48 +31,45 @@ def 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot,
    chatbot.append((i_say, "[Local Message] waiting gpt response."))
    yield chatbot, history, '正常'

    if not fast_debug:
    if not fast_debug:
        msg = '正常'
        # ** gpt request **
        gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature,
                                                             history=history) # 带超时倒计时

        gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature, history=history) # 带超时倒计时

        chatbot[-1] = (i_say, gpt_say)
        history.append(i_say);
        history.append(gpt_say)
        history.append(i_say); history.append(gpt_say)
        yield chatbot, history, msg
    res = write_results_to_file(history)
    chatbot.append(("完成了吗?", res))
    yield chatbot, history, msg


@CatchException
def 解析项目本身(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
    history = []    # 清空历史,以免输入溢出
    history = []    # 清空历史,以免输入溢出
    import time, glob, os
    file_manifest = [f for f in glob.glob('./*.py') if ('test_project' not in f) and ('gpt_log' not in f)] + \
                    [f for f in glob.glob('./crazy_functions/*.py') if
                     ('test_project' not in f) and ('gpt_log' not in f)]
                    [f for f in glob.glob('./crazy_functions/*.py') if ('test_project' not in f) and ('gpt_log' not in f)]
    for index, fp in enumerate(file_manifest):
        # if 'test_project' in fp: continue
        with open(fp, 'r', encoding='utf-8') as f:
            file_content = f.read()

        prefix = "接下来请你分析自己的程序构成,别紧张," if index == 0 else ""
        prefix = "接下来请你分析自己的程序构成,别紧张," if index==0 else ""
        i_say = prefix + f'请对下面的程序文件做一个概述文件名是{fp},文件代码是 ```{file_content}```'
        i_say_show_user = prefix + f'[{index}/{len(file_manifest)}] 请对下面的程序文件做一个概述: {os.path.abspath(fp)}'
        chatbot.append((i_say_show_user, "[Local Message] waiting gpt response."))
        yield chatbot, history, '正常'

        if not fast_debug:
        if not fast_debug:
            # ** gpt request **
            # gpt_say = predict_no_ui(inputs=i_say, top_p=top_p, temperature=temperature)
            gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature,
                                                                 history=[], long_connection=True) # 带超时倒计时
            gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say_show_user, chatbot, top_p, temperature, history=[], long_connection=True) # 带超时倒计时

            chatbot[-1] = (i_say_show_user, gpt_say)
            history.append(i_say_show_user);
            history.append(gpt_say)
            history.append(i_say_show_user); history.append(gpt_say)
            yield chatbot, history, '正常'
            time.sleep(2)
@@ -84,35 +77,32 @@ def 解析项目本身(txt, top_p, temperature, chatbot, history, systemPromptTx
    chatbot.append((i_say, "[Local Message] waiting gpt response."))
    yield chatbot, history, '正常'

    if not fast_debug:
    if not fast_debug:
        # ** gpt request **
        # gpt_say = predict_no_ui(inputs=i_say, top_p=top_p, temperature=temperature, history=history)
        gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature, history=history,
                                                             long_connection=True) # 带超时倒计时
        gpt_say = yield from predict_no_ui_but_counting_down(i_say, i_say, chatbot, top_p, temperature, history=history, long_connection=True) # 带超时倒计时

        chatbot[-1] = (i_say, gpt_say)
        history.append(i_say);
        history.append(gpt_say)
        history.append(i_say); history.append(gpt_say)
        yield chatbot, history, '正常'
    res = write_results_to_file(history)
    chatbot.append(("完成了吗?", res))
    yield chatbot, history, '正常'


@CatchException
def 解析一个Python项目(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
    history = []    # 清空历史,以免输入溢出
    history = []    # 清空历史,以免输入溢出
    import glob, os
    if os.path.exists(txt):
        project_folder = txt
    else:
        if txt == "": txt = '空空如也的输入栏'
        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}")
        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}")
        yield chatbot, history, '正常'
        return
    file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.py', recursive=True)]
    if len(file_manifest) == 0:
        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何python文件: {txt}")
        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何python文件: {txt}")
        yield chatbot, history, '正常'
        return
    yield from 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt)
@@ -120,41 +110,40 @@ def 解析一个Python项目(txt, top_p, temperature, chatbot, history, systemPr

@CatchException
def 解析一个C项目的头文件(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
    history = []    # 清空历史,以免输入溢出
    history = []    # 清空历史,以免输入溢出
    import glob, os
    if os.path.exists(txt):
        project_folder = txt
    else:
        if txt == "": txt = '空空如也的输入栏'
        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}")
        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}")
        yield chatbot, history, '正常'
        return
    file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)]  # + \
                  # [f for f in glob.glob(f'{project_folder}/**/*.cpp', recursive=True)] + \
                  # [f for f in glob.glob(f'{project_folder}/**/*.c', recursive=True)]
    file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)]  # + \
                  # [f for f in glob.glob(f'{project_folder}/**/*.cpp', recursive=True)] + \
                  # [f for f in glob.glob(f'{project_folder}/**/*.c', recursive=True)]
    if len(file_manifest) == 0:
        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何.h头文件: {txt}")
        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.h头文件: {txt}")
        yield chatbot, history, '正常'
        return
    yield from 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt)


@CatchException
def 解析一个C项目(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
    history = []    # 清空历史,以免输入溢出
    history = []    # 清空历史,以免输入溢出
    import glob, os
    if os.path.exists(txt):
        project_folder = txt
    else:
        if txt == "": txt = '空空如也的输入栏'
        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}")
        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目或无权访问: {txt}")
        yield chatbot, history, '正常'
        return
    file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)] + \
    file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)] + \
                    [f for f in glob.glob(f'{project_folder}/**/*.cpp', recursive=True)] + \
                    [f for f in glob.glob(f'{project_folder}/**/*.c', recursive=True)]
    if len(file_manifest) == 0:
        report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何.h头文件: {txt}")
        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.h头文件: {txt}")
        yield chatbot, history, '正常'
        return
    yield from 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt)
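The hunks that actually add the Golang and Java entry points named in the commit title are not included in this excerpt. Judging from the 解析一个Python项目 and 解析一个C项目 functions above, the new entry points presumably follow the same template with a different glob pattern; the sketch below is an assumption based on that pattern (the function name, glob pattern, and error message are illustrative, not taken from this diff):

    @CatchException
    def 解析一个Golang项目(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
        history = []    # 清空历史,以免输入溢出
        import glob, os
        if os.path.exists(txt):
            project_folder = txt
        else:
            if txt == "": txt = '空空如也的输入栏'
            report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到本地项目或无权访问: {txt}")
            yield chatbot, history, '正常'
            return
        # collect all .go sources recursively; a Java variant would glob **/*.java instead
        file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.go', recursive=True)]
        if len(file_manifest) == 0:
            report_execption(chatbot, history, a=f"解析项目: {txt}", b=f"找不到任何golang文件: {txt}")
            yield chatbot, history, '正常'
            return
        yield from 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt)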
@@ -1,13 +1,8 @@
from toolbox import HotReload # HotReload 的意思是热更新,修改函数插件后,不需要重启程序,代码直接生效

# UserVisibleLevel是过滤器参数。
# 由于UI界面空间有限,所以通过这种方式决定UI界面中显示哪些插件
# 默认函数插件 VisibleLevel 是 0
# 当 UserVisibleLevel >= 函数插件的 VisibleLevel 时,该函数插件才会被显示出来
UserVisibleLevel = 1


def get_crazy_functionals():
    ###################### 第一组插件 ###########################
    # [第一组插件]: 最早期编写的项目插件和一些demo
    from crazy_functions.读文章写摘要 import 读文章写摘要
    from crazy_functions.生成函数注释 import 批量生成函数注释
    from crazy_functions.解析项目源代码 import 解析项目本身
@@ -70,33 +65,44 @@ def get_crazy_functionals():
            "Function": HotReload(高阶功能模板函数)
        },
    }
    ###################### 第二组插件 ###########################
    # [第二组插件]: 经过充分测试,但功能上距离达到完美状态还差一点点
    from crazy_functions.批量总结PDF文档 import 批量总结PDF文档
    from crazy_functions.批量总结PDF文档pdfminer import 批量总结PDF文档pdfminer
    from crazy_functions.总结word文档 import 总结word文档
    function_plugins.update({
        "[仅供开发调试] 批量总结PDF文档": {
            "Color": "stop",
            "Function": HotReload(批量总结PDF文档) # HotReload 的意思是热更新,修改函数插件代码后,不需要重启程序,代码直接生效
        },
        "[仅供开发调试] 批量总结PDF文档pdfminer": {
            "Color": "stop",
            "AsButton": False, # 加入下拉菜单中
            "Function": HotReload(批量总结PDF文档pdfminer)
        },
        "[仅供开发调试] 批量总结Word文档": {
            "Color": "stop",
            "Function": HotReload(总结word文档)
        },
    })

    # VisibleLevel=1 经过测试,但功能上距离达到完美状态还差一点点
    if UserVisibleLevel >= 1:
        from crazy_functions.批量总结PDF文档 import 批量总结PDF文档
        from crazy_functions.批量总结PDF文档pdfminer import 批量总结PDF文档pdfminer
        from crazy_functions.总结word文档 import 总结word文档
    ###################### 第三组插件 ###########################
    # [第三组插件]: 尚未充分测试的函数插件,放在这里
    try:
        from crazy_functions.下载arxiv论文翻译摘要 import 下载arxiv论文并翻译摘要
        function_plugins.update({
            "[仅供开发调试] 批量总结PDF文档": {
                "Color": "stop",
                "Function": HotReload(批量总结PDF文档) # HotReload 的意思是热更新,修改函数插件代码后,不需要重启程序,代码直接生效
            },
            "[仅供开发调试] 批量总结PDF文档pdfminer": {
            "下载arxiv论文并翻译摘要": {
                "Color": "stop",
                "AsButton": False, # 加入下拉菜单中
                "Function": HotReload(批量总结PDF文档pdfminer)
            },
            "[仅供开发调试] 批量总结Word文档": {
                "Color": "stop",
                "Function": HotReload(总结word文档)
            },
                "Function": HotReload(下载arxiv论文并翻译摘要)
            }
        })
    except Exception as err:
        print(f'[下载arxiv论文并翻译摘要] 插件导入失败 {str(err)}')

    # VisibleLevel=2 尚未充分测试的函数插件,放在这里
    if UserVisibleLevel >= 2:
        function_plugins.update({
        })

    ###################### 第n组插件 ###########################
    return function_plugins
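The registration of the new Golang/Java project parsers in get_crazy_functionals is likewise not visible in this excerpt. Following the function_plugins.update pattern shown above, it would presumably look roughly like the following sketch (the entry name and import are assumptions, not taken from this diff):

    # Hypothetical registration sketch, mirroring the existing parser entries.
    from crazy_functions.解析项目源代码 import 解析一个Golang项目
    function_plugins.update({
        "解析整个Golang项目": {
            "Color": "stop",        # same button color used by the other parser plugins
            "AsButton": False,      # place it in the dropdown rather than as a dedicated button
            "Function": HotReload(解析一个Golang项目)
        },
    })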
7 main.py
@@ -11,9 +11,8 @@ proxies, WEB_PORT, LLM_MODEL, CONCURRENT_COUNT, AUTHENTICATION, CHATBOT_HEIGHT =
PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT
if not AUTHENTICATION: AUTHENTICATION = None

title = "ChatGPT 学术优化" if LLM_MODEL.startswith('gpt') else "ChatGPT / LLM 学术优化"
initial_prompt = "Serve me as a writing and programming assistant."
title_html = f"<h1 align=\"center\">{title}</h1>"
title_html = """<h1 align="center">ChatGPT 学术优化</h1>"""

# 问询记录, python 版本建议3.9+(越新越好)
import logging

@@ -120,7 +119,7 @@ with gr.Blocks(theme=set_theme, analytics_enabled=False, css=advanced_css) as de
    dropdown.select(on_dropdown_changed, [dropdown], [switchy_bt] )
    # 随变按钮的回调函数注册
    def route(k, *args, **kwargs):
        if k in [r"打开插件列表", r"先从插件列表中选择"]: return
        if k in [r"打开插件列表", r"请先从插件列表中选择"]: return
        yield from crazy_fns[k]["Function"](*args, **kwargs)
    click_handle = switchy_bt.click(route,[switchy_bt, *input_combo, gr.State(PORT)], output_combo)
    click_handle.then(on_report_generated, [file_upload, chatbot], [file_upload, chatbot])

@@ -141,5 +140,5 @@ def auto_opentab_delay():
    threading.Thread(target=open, name="open-browser", daemon=True).start()

auto_opentab_delay()
demo.title = title
demo.title = "ChatGPT 学术优化"
demo.queue(concurrency_count=CONCURRENT_COUNT).launch(server_name="0.0.0.0", share=True, server_port=PORT, auth=AUTHENTICATION)
11 predict.py
@@ -112,7 +112,8 @@ def predict_no_ui_long_connection(inputs, top_p, temperature, history=[], sys_pr
    return result


def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt='', stream = True, additional_fn=None):
def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt='',
            stream = True, additional_fn=None):
    """
    发送至chatGPT,流式获取输出。
    用于基础的对话功能。

@@ -243,11 +244,3 @@ def generate_payload(inputs, top_p, temperature, history, system_prompt, stream)
    return headers,payload


if not LLM_MODEL.startswith('gpt'):
    # 函数重载到另一个文件
    from request_llm.bridge_tgui import predict_tgui, predict_tgui_no_ui
    predict = predict_tgui
    predict_no_ui = predict_tgui_no_ui
    predict_no_ui_long_connection = predict_tgui_no_ui
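The block touched at the end of predict.py rebinds the module-level functions when LLM_MODEL does not start with 'gpt', so callers that import predict transparently get the TGUI backend. A minimal, self-contained illustration of that rebinding pattern (names and values here are illustrative, not from the repo):

    # Standalone sketch of module-level function rebinding; the real code swaps in
    # predict_tgui from request_llm.bridge_tgui under the same condition.
    LLM_MODEL = "tgui:galactica"                 # assumed config value, for illustration

    def predict(inputs):
        return f"gpt backend answered: {inputs}"

    def predict_tgui(inputs):
        return f"tgui backend answered: {inputs}"

    if not LLM_MODEL.startswith('gpt'):
        predict = predict_tgui                   # every later caller of predict() now hits the TGUI backend

    print(predict("hello"))                      # -> tgui backend answered: hello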