Correctly display requests errors

Your Name 2023-03-23 00:34:55 +08:00
parent b3a67b84b9
commit ac2c8cab1f
4 changed files with 153 additions and 54 deletions

.vscode/launch.json vendored Normal file

@@ -0,0 +1,16 @@
{
    // Use IntelliSense to learn about possible attributes.
    // Hover to view descriptions of existing attributes.
    // For more information, visit: https://go.microsoft.com/fwlink/?linkid=830387
    "version": "0.2.0",
    "configurations": [
        {
            "name": "Python: Current File",
            "type": "python",
            "request": "launch",
            "program": "${file}",
            "console": "integratedTerminal",
            "justMyCode": false
        }
    ]
}


@@ -12,7 +12,7 @@ def get_functionals():
improve the spelling, grammar, clarity, concision and overall readability. When neccessary, rewrite the whole sentence. \
Furthermore, list all modification and explain the reasons to do so in markdown table.\n\n",  # 前言
            "Suffix": "",  # 后语
-            "Color": "stop",  # 按钮颜色
+            "Color": "secondary",  # 按钮颜色
        },
        "中文学术润色": {
            "Prefix": "作为一名中文学术论文写作改进助理,你的任务是改进所提供文本的拼写、语法、清晰、简洁和整体可读性,同时分解长句,减少重复,并提供改进建议。请只提供文本的更正版本,避免包括解释。请编辑以下文本:\n\n",
@@ -35,7 +35,7 @@ For phrases or individual words that require translation, provide the source (di
separate them using the | symbol.Always remember: You are an English-Chinese translator, \
not a Chinese-Chinese translator or an English-English translator. Below is the text you need to translate: \n\n",
            "Suffix": "",
-            "Color": "stop",
+            "Color": "secondary",
        },
        "中译英": {
            "Prefix": "Please translate following sentence to English: \n\n",
@@ -52,7 +52,7 @@ not a Chinese-Chinese translator or an English-English translator. Below is the
        "解释代码": {
            "Prefix": "请解释以下代码:\n```\n",
            "Suffix": "\n```\n",
-            "Color": "stop",
+            "Color": "secondary",
        },
    }
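Each entry above is a button definition: judging by the fields, the UI presumably concatenates the entry's Prefix, the user's input and the Suffix into the final prompt, while Color selects the button style ('secondary' for the subdued default, 'stop' for a stop button). A minimal sketch of that assumed wiring; build_prompt and the inlined sample entry are illustrative, not part of the repository:

# Illustrative sketch: how a Prefix/Suffix entry is presumably consumed by the UI.
functionals = {
    "解释代码": {
        "Prefix": "请解释以下代码:\n```\n",
        "Suffix": "\n```\n",
        "Color": "secondary",
    },
}

def build_prompt(name, user_text):
    # Wrap the user's text with the button's prefix and suffix before sending it to the model.
    entry = functionals[name]
    return entry["Prefix"] + user_text + entry["Suffix"]

print(build_prompt("解释代码", "print('hello world')"))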


@@ -3,6 +3,9 @@
# 'secondary' for a more subdued style,
# 'stop' for a stop button.
# """
+fast_debug = False

def 自我程序解构简单案例(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
    import time
    from predict import predict_no_ui_no_history
@@ -25,11 +28,12 @@ def 解析项目本身(txt, top_p, temperature, chatbot, history, systemPromptTx
            file_content = f.read()
        前言 = "接下来请你分析自己的程序构成,别紧张," if index==0 else ""
-        i_say = f'请对下面的程序文件做一个概述: ```{file_content}```'
-        i_say_show_user = 前言 + f'请对下面的程序文件做一个概述: {os.path.abspath(fp)}'
+        i_say = 前言 + f'请对下面的程序文件做一个概述文件名是{fp},文件代码是 ```{file_content}```'
+        i_say_show_user = 前言 + f'[{index}/{len(file_manifest)}] 请对下面的程序文件做一个概述: {os.path.abspath(fp)}'
        chatbot.append((i_say_show_user, "[waiting gpt response]"))
        yield chatbot, history, '正常'

+        if not fast_debug:
            # ** gpt request **
            gpt_say = predict_no_ui(inputs=i_say, top_p=top_p, temperature=temperature)
@@ -42,6 +46,7 @@ def 解析项目本身(txt, top_p, temperature, chatbot, history, systemPromptTx
    chatbot.append((i_say, "[waiting gpt response]"))
    yield chatbot, history, '正常'

+    if not fast_debug:
        # ** gpt request **
        gpt_say = predict_no_ui(inputs=i_say, top_p=top_p, temperature=temperature, history=history)
@@ -49,66 +54,128 @@ def 解析项目本身(txt, top_p, temperature, chatbot, history, systemPromptTx
    history.append(i_say); history.append(gpt_say)
    yield chatbot, history, '正常'

+def report_execption(chatbot, history, a, b):
+    chatbot.append((a, b))
+    history.append(a); history.append(b)

-def 解析一个Python项目(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
+def 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt):
    import time, glob, os
    from predict import predict_no_ui
-    if os.path.exists(txt):
-        project_folder = txt
-    else:
-        if txt == "": txt = '空空如也的输入栏'
-        chatbot.append((f"解析项目: {txt}", f"找不到本地项目: {txt}"))
-        history.append(f"解析项目: {txt}"); history.append(f"找不到本地项目: {txt}")
-        yield chatbot, history, '正常'
-        return
-    file_manifest = [f for f in glob.glob(f'{project_folder}/*.py')]
    print('begin analysis on:', file_manifest)
    for index, fp in enumerate(file_manifest):
        with open(fp, 'r', encoding='utf-8') as f:
            file_content = f.read()
-        前言 = "接下来请你逐文件分析下面的Python工程" if index==0 else ""
-        i_say = f'请对下面的程序文件做一个概述: ```{file_content}```'
+        前言 = "接下来请你逐文件分析下面的工程" if index==0 else ""
+        i_say = 前言 + f'请对下面的程序文件做一个概述文件名是{os.path.relpath(fp, project_folder)},文件代码是 ```{file_content}```'
        i_say_show_user = 前言 + f'[{index}/{len(file_manifest)}] 请对下面的程序文件做一个概述: {os.path.abspath(fp)}'
        chatbot.append((i_say_show_user, "[waiting gpt response]"))
        print('[1] yield chatbot, history')
        yield chatbot, history, '正常'

+        if not fast_debug:
+            msg = '正常'
            # ** gpt request **
+            while True:
+                try:
                    gpt_say = predict_no_ui(inputs=i_say, top_p=top_p, temperature=temperature)
+                    break
+                except ConnectionAbortedError as e:
+                    i_say = i_say[:len(i_say)//2]
+                    msg = '文件太长,进行了拦腰截断'
            print('[2] end gpt req')
            chatbot[-1] = (i_say_show_user, gpt_say)
            history.append(i_say_show_user); history.append(gpt_say)
            print('[3] yield chatbot, history')
-            yield chatbot, history, '正常'
+            yield chatbot, history, msg
            print('[4] next')
-        time.sleep(2)
+        if not fast_debug: time.sleep(2)

-    i_say = f'根据以上你自己的分析对程序的整体功能和构架做出概括。然后用一张markdown表格整理每个文件的功能(包括{file_manifest})。'
+    all_file = ', '.join([os.path.relpath(fp, project_folder) for index, fp in enumerate(file_manifest)])
+    i_say = f'根据以上你自己的分析对程序的整体功能和构架做出概括。然后用一张markdown表格整理每个文件的功能(包括{all_file})。'
    chatbot.append((i_say, "[waiting gpt response]"))
    yield chatbot, history, '正常'

+    if not fast_debug:
+        msg = '正常'
        # ** gpt request **
+        while True:
+            try:
                gpt_say = predict_no_ui(inputs=i_say, top_p=top_p, temperature=temperature, history=history)
+                break
+            except ConnectionAbortedError as e:
+                history = [his[len(his)//2:] for his in history]
+                msg = '对话历史太长,每段历史拦腰截断'
        chatbot[-1] = (i_say, gpt_say)
        history.append(i_say); history.append(gpt_say)
+        yield chatbot, history, msg

+def 解析一个Python项目(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
+    import glob, os
+    if os.path.exists(txt):
+        project_folder = txt
+    else:
+        if txt == "": txt = '空空如也的输入栏'
+        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目: {txt}")
        yield chatbot, history, '正常'
+        return
+    file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.py', recursive=True)]
+    if len(file_manifest) == 0:
+        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何python文件: {txt}")
+        yield chatbot, history, '正常'
+        return
+    yield from 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt)

+def 解析一个C项目的头文件(txt, top_p, temperature, chatbot, history, systemPromptTxt, WEB_PORT):
+    import glob, os
+    if os.path.exists(txt):
+        project_folder = txt
+    else:
+        if txt == "": txt = '空空如也的输入栏'
+        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到本地项目: {txt}")
+        yield chatbot, history, '正常'
+        return
+    file_manifest = [f for f in glob.glob(f'{project_folder}/**/*.h', recursive=True)]  # + \
+                    # [f for f in glob.glob(f'{project_folder}/**/*.cpp', recursive=True)] + \
+                    # [f for f in glob.glob(f'{project_folder}/**/*.c', recursive=True)]
+    if len(file_manifest) == 0:
+        report_execption(chatbot, history, a = f"解析项目: {txt}", b = f"找不到任何.h/.cpp/.c文件: {txt}")
+        yield chatbot, history, '正常'
+        return
+    yield from 解析源代码(file_manifest, project_folder, top_p, temperature, chatbot, history, systemPromptTxt)

def get_crazy_functionals():
    return {
        "程序解构简单案例": {
+            "Color": "stop",    # 按钮颜色
            "Function": 自我程序解构简单案例
        },
        "请解析并解构此项目本身": {
+            "Color": "stop",    # 按钮颜色
            "Function": 解析项目本身
        },
        "解析一整个Python项目输入栏给定项目完整目录": {
+            "Color": "stop",    # 按钮颜色
            "Function": 解析一个Python项目
        },
+        "解析一整个C++项目的头文件(输入栏给定项目完整目录)": {
+            "Color": "stop",    # 按钮颜色
+            "Function": 解析一个C项目的头文件
+        },
    }
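The heart of this change is the retry loop in 解析源代码: when predict_no_ui raises ConnectionAbortedError (which, after the change to predict_no_ui further down, means the response had no 'choices' field, typically because the input was too long), the prompt or the history entries are cut in half and the request is retried, and the status message reports the truncation. A standalone sketch of that pattern, with call_model as a hypothetical stand-in for predict_no_ui:

# Standalone sketch of the retry-with-truncation pattern used in 解析源代码.
def call_model(prompt):
    # Hypothetical stand-in for predict_no_ui: pretend anything over 100 chars overflows the context.
    if len(prompt) > 100:
        raise ConnectionAbortedError("Json解析不合常规可能是文本过长")
    return f"summary of {len(prompt)} chars"

def ask_with_truncation(prompt):
    msg = '正常'
    while True:
        try:
            reply = call_model(prompt)
            break
        except ConnectionAbortedError:
            prompt = prompt[:len(prompt)//2]   # cut the prompt in half and retry
            msg = '文件太长,进行了拦腰截断'
    return reply, msg

print(ask_with_truncation("x" * 1000))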


@@ -14,6 +14,13 @@ except: from config import proxies, API_URL, API_KEY, TIMEOUT_SECONDS
timeout_bot_msg = 'Request timeout, network error. please check proxy settings in config.py.'

+def get_full_error(chunk, stream_response):
+    while True:
+        try:
+            chunk += next(stream_response)
+        except:
+            break
+    return chunk

def predict_no_ui(inputs, top_p, temperature, history=[]):
    messages = [{"role": "system", "content": ""}]
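With stream=True, requests also delivers an error body in pieces, so the first chunk alone often is not valid JSON; get_full_error keeps pulling chunks until the stream is exhausted so the complete error body can be shown to the user. A rough, self-contained sketch of the idea; the simulated stream and error text below are invented for illustration, and the helper is rewritten here with an explicit StopIteration where the committed version uses a bare except:

def get_full_error(chunk, stream_response):
    # Drain whatever is left of the streamed response so the whole error body is kept.
    while True:
        try:
            chunk += next(stream_response)
        except StopIteration:
            break
    return chunk

# Pretend a streamed request failed and the error JSON arrived split across chunks.
simulated_stream = iter([b'": "Please reduce the length', b' of the messages."}}'])
first_chunk = b'{"error": {"message'

print(get_full_error(first_chunk, simulated_stream).decode())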
@@ -60,10 +67,17 @@ def predict_no_ui(inputs, top_p, temperature, history=[]):
        # make a POST request to the API endpoint using the requests.post method, passing in stream=True
        response = requests.post(API_URL, headers=headers, proxies=proxies,
                                 json=payload, stream=True, timeout=TIMEOUT_SECONDS*2)
-    except:
+    except Exception as e:
+        traceback.print_exc()
        raise TimeoutError

-    return json.loads(response.text)["choices"][0]["message"]["content"]
+    try:
+        result = json.loads(response.text)["choices"][0]["message"]["content"]
+        return result
+    except Exception as e:
+        if "choices" not in response.text: print(response.text)
+        raise ConnectionAbortedError("Json解析不合常规可能是文本过长" + response.text)
@@ -163,11 +177,6 @@ def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt=''
                    if len(json.loads(chunk.decode()[6:])['choices'][0]["delta"]) == 0:
                        logging.info(f'[response] {chatbot[-1][-1]}')
                        break
-                except Exception as e:
-                    traceback.print_exc()
-                    print(chunk.decode())
-                try:
                    chunkjson = json.loads(chunk.decode()[6:])
                    status_text = f"finish_reason: {chunkjson['choices'][0]['finish_reason']}"
                    partial_words = partial_words + json.loads(chunk.decode()[6:])['choices'][0]["delta"]["content"]
@@ -181,5 +190,12 @@ def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt=''
                except Exception as e:
                    traceback.print_exc()
-                    print(chunk.decode())
-                    yield chatbot, history, "Json解析不合常规"
+                    yield chatbot, history, "Json解析不合常规很可能是文本过长"
+                    chunk = get_full_error(chunk, stream_response)
+                    error_msg = chunk.decode()
+                    if "reduce the length" in error_msg:
+                        chatbot[-1] = (history[-1], "老铁,输入的文本太长了")
+                    yield chatbot, history, "Json解析不合常规很可能是文本过长" + error_msg
+                    return
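This last hunk is what the commit title describes: instead of printing the raw chunk and yielding a generic message, predict() now drains the stream with get_full_error, decodes the complete error body, and, when the body mentions 'reduce the length' (the wording OpenAI-style APIs use for over-long inputs), tells the user directly that the input was too long. A small sketch of that classification step; the sample error payload is invented for illustration:

import json

def describe_stream_error(error_msg, last_user_input):
    # Mirror the logic added to predict(): special-case the "input too long" error,
    # otherwise surface the full error body in the status message.
    if "reduce the length" in error_msg:
        return (last_user_input, "老铁,输入的文本太长了")
    return (last_user_input, "Json解析不合常规很可能是文本过长" + error_msg)

# Invented example of an OpenAI-style error payload.
sample = json.dumps({"error": {"message":
    "This model's maximum context length is 4096 tokens. Please reduce the length of the messages."}})
print(describe_stream_error(sample, "请解释以下代码..."))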