add proxy debug function

qingxu fu 2023-03-22 17:25:37 +08:00
parent f8a44a82a9
commit d00f6bb1a6
5 changed files with 84 additions and 52 deletions

check_proxy.py (new file, 25 lines)

@@ -0,0 +1,25 @@
"""
Use Python's requests library to look up where this machine's outbound IP address is located.
ChatGPT:
"""
def check_proxy(proxies):
    import requests
    proxies_https = proxies['https'] if proxies is not None else ''
    try:
        response = requests.get("https://ipapi.co/json/", proxies=proxies, timeout=4)
        data = response.json()
        country = data['country_name']
        # city = data['city']
        result = f"代理配置 {proxies_https}, 代理所在地:{country}"
        print(result)
        return result
    except:
        result = f"代理配置 {proxies_https}, 代理所在地查询超时,代理可能无效"
        print(result)
        return result

if __name__ == '__main__':
    from config import proxies
    check_proxy(proxies)
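For reference, a minimal sketch of calling check_proxy by hand with an explicit proxies dict; the socks5h://localhost:11284 address is only the placeholder used in the config template below, not a working proxy.

from check_proxy import check_proxy

example_proxies = {
    "http":  "socks5h://localhost:11284",   # placeholder address, substitute your own proxy
    "https": "socks5h://localhost:11284",
}
check_proxy(example_proxies)  # prints the configured proxy and the country it appears to be in
check_proxy(None)             # no proxy: queries ipapi.co directly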

config.py

@@ -2,14 +2,22 @@
API_KEY = "sk-此处填API秘钥"
API_URL = "https://api.openai.com/v1/chat/completions"

# Set to True to route requests through a proxy
USE_PROXY = False
if USE_PROXY:
    # Proxy address: check your proxy client for the protocol (socks5/http), host (localhost) and port (11284)
    proxies = { "http": "socks5h://localhost:11284", "https": "socks5h://localhost:11284", }
    print('网络代理状态:运行。')
else:
    proxies = None
    print('网络代理状态:未配置。无代理状态下很可能无法访问。')

# avoid dummy

# How long to wait for a response from OpenAI before treating the request as timed out
TIMEOUT_SECONDS = 20

# Port for the web UI; -1 means a random free port
WEB_PORT = -1

# Make sure the config has actually been edited
if API_KEY == "sk-此处填API秘钥":
    assert False, "请在config文件中修改API密钥, 添加海外代理之后再运行"
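The predict module shown further down prefers a config_private.py override when that file exists, so personal keys and proxy settings can stay out of version control. A minimal sketch of such a file, with placeholder values only; it just needs to define the same names that are imported from config.py:

# config_private.py -- hypothetical example, every value below is a placeholder
API_KEY = "sk-your-real-key-here"
API_URL = "https://api.openai.com/v1/chat/completions"
TIMEOUT_SECONDS = 20
proxies = {
    "http":  "socks5h://localhost:11284",   # match your own proxy client
    "https": "socks5h://localhost:11284",
}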

functional.py

@@ -10,19 +10,16 @@ def get_functionals():
        "英语学术润色": {
            "Prefix": "Below is a paragraph from an academic paper. Polish the writing to meet the academic style, \
improve the spelling, grammar, clarity, concision and overall readability. When neccessary, rewrite the whole sentence. \
Furthermore, list all modification and explain the reasons to do so in markdown table.\n\n",
            "Button": None,
            "Suffix": "",
            "Color": "stop",
Furthermore, list all modification and explain the reasons to do so in markdown table.\n\n",  # prompt prefix
            "Suffix": "",     # prompt suffix
            "Color": "stop",  # button color
        },
        "中文学术润色": {
            "Prefix": "作为一名中文学术论文写作改进助理,你的任务是改进所提供文本的拼写、语法、清晰、简洁和整体可读性,同时分解长句,减少重复,并提供改进建议。请只提供文本的更正版本,避免包括解释。请编辑以下文本:\n\n",
            "Button": None,
            "Suffix": "",
        },
        "查找语法错误": {
            "Prefix": "Below is a paragraph from an academic paper. Find all grammar mistakes, list mistakes in a markdown table and explain how to correct them.\n\n",
            "Button": None,
            "Suffix": "",
        },
        "中英互译": {
@@ -37,28 +34,23 @@ When providing translations, please use Chinese to explain each sentences ten
For phrases or individual words that require translation, provide the source (dictionary) for each one.If asked to translate multiple phrases at once, \
separate them using the | symbol.Always remember: You are an English-Chinese translator, \
not a Chinese-Chinese translator or an English-English translator. Below is the text you need to translate: \n\n",
            "Button": None,
            "Suffix": "",
            "Color": "stop",
        },
        "中译英": {
            "Prefix": "Please translate following sentence to English: \n\n",
            "Button": None,
            "Suffix": "",
        },
        "学术中译英": {
            "Prefix": "Please translate following sentence to English with academic writing, and provide some related authoritative examples: \n\n",
            "Button": None,
            "Suffix": "",
        },
        "英译中": {
            "Prefix": "请翻译成中文:\n\n",
            "Button": None,
            "Suffix": "",
        },
        "解释代码": {
            "Prefix": "请解释以下代码:\n```\n",
            "Button": None,
            "Suffix": "\n```\n",
            "Color": "stop",
        },
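The Prefix and Suffix fields are evidently text placed before and after the user's input when the corresponding button is pressed. A minimal sketch of that composition, assuming plain string concatenation and that the module above is importable as functional; the helper name apply_functional is made up for illustration:

from functional import get_functionals

def apply_functional(name, user_text):
    # Frame the user's text with the selected entry's prefix and suffix.
    entry = get_functionals()[name]
    return entry["Prefix"] + user_text + entry["Suffix"]

prompt = apply_functional("解释代码", "print('hello')")
# -> "请解释以下代码:\n```\nprint('hello')\n```\n"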

main.py

@@ -3,6 +3,7 @@ import os
import markdown, mdtex2html
from predict import predict
from show_math import convert as convert_math
from config import proxies, WEB_PORT

def find_free_port():
    import socket
@@ -11,8 +12,8 @@ def find_free_port():
        s.bind(('', 0))
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        return s.getsockname()[1]

PORT = find_free_port()
PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT

initial_prompt = "Serve me as a writing and programming assistant."
title_html = """<h1 align="center">ChatGPT 学术优化</h1>"""
@@ -81,8 +82,8 @@ with gr.Blocks() as demo:
    for k in functional:
        variant = functional[k]["Color"] if "Color" in functional[k] else "secondary"
        functional[k]["Button"] = gr.Button(k, variant=variant)
    statusDisplay = gr.Markdown("status: ready")
    from check_proxy import check_proxy
    statusDisplay = gr.Markdown(f"{check_proxy(proxies)}")
    systemPromptTxt = gr.Textbox(show_label=True, placeholder=f"System Prompt", label="System prompt", value=initial_prompt).style(container=True)
    # inputs, top_p, temperature, top_k, repetition_penalty
    with gr.Accordion("arguments", open=False):
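With the change above, a positive WEB_PORT from config.py is used directly, while any non-positive value (the default -1) falls back to a random free port. A self-contained sketch of the same selection rule:

import socket

def find_free_port():
    # Binding to port 0 lets the OS pick any free port; read it back and return it.
    with socket.socket(socket.AF_INET, socket.SOCK_STREAM) as s:
        s.bind(('', 0))
        s.setsockopt(socket.SOL_SOCKET, socket.SO_REUSEADDR, 1)
        return s.getsockname()[1]

WEB_PORT = -1  # value from config.py; set a fixed positive port to pin the UI
PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT
print(f"web UI would be served on port {PORT}")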

predict.py

@@ -8,11 +8,11 @@ import os
if os.path.exists('config_private.py'):
    # Keep your own secrets here, e.g. the API key and proxy address
    from config_private import proxies, API_URL, API_KEY
    from config_private import proxies, API_URL, API_KEY, TIMEOUT_SECONDS
else:
    from config import proxies, API_URL, API_KEY
    from config import proxies, API_URL, API_KEY, TIMEOUT_SECONDS

timeout_bot_msg = 'Request timeout, network error. please check proxy settings in config.py.'

def compose_system(system_prompt):
    return {"role": "system", "content": system_prompt}

@@ -35,7 +35,7 @@ def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt=''
    raw_input = inputs
    logging.info(f'[raw_input] {raw_input}')
    chatbot.append((inputs, ""))
    yield chatbot, history, "Waiting"
    yield chatbot, history, "等待响应"

    headers = {
        "Content-Type": "application/json",
@@ -49,26 +49,29 @@ def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt=''
    messages = [compose_system(system_prompt)]
    if chat_counter:
        for index in range(0, 2*chat_counter, 2):
            d1 = {}
            d1["role"] = "user"
            d1["content"] = history[index]
            d2 = {}
            d2["role"] = "assistant"
            d2["content"] = history[index+1]
            if d1["content"] != "":
                if d2["content"] != "" or retry:
                    messages.append(d1)
                    messages.append(d2)
            what_i_have_asked = {}
            what_i_have_asked["role"] = "user"
            what_i_have_asked["content"] = history[index]
            what_gpt_answer = {}
            what_gpt_answer["role"] = "assistant"
            what_gpt_answer["content"] = history[index+1]
            if what_i_have_asked["content"] != "":
                if not (what_gpt_answer["content"] != "" or retry): continue
                if what_gpt_answer["content"] == timeout_bot_msg: continue
                messages.append(what_i_have_asked)
                messages.append(what_gpt_answer)
            else:
                messages[-1]['content'] = d2['content']
                messages[-1]['content'] = what_gpt_answer['content']
        if retry and chat_counter:
            messages.pop()
    else:
        temp3 = {}
        temp3["role"] = "user"
        temp3["content"] = inputs
        messages.append(temp3)
        what_i_ask_now = {}
        what_i_ask_now["role"] = "user"
        what_i_ask_now["content"] = inputs
        messages.append(what_i_ask_now)
        chat_counter += 1

    # messages
    payload = {
        "model": "gpt-3.5-turbo",
@@ -87,10 +90,10 @@ def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt=''
    try:
        # make a POST request to the API endpoint using the requests.post method, passing in stream=True
        response = requests.post(API_URL, headers=headers, proxies=proxies,
                                 json=payload, stream=True, timeout=15)
                                 json=payload, stream=True, timeout=TIMEOUT_SECONDS)
    except:
        chatbot[-1] = ((chatbot[-1][0], 'Request timeout, network error. please check proxy settings in config.py.'))
        yield chatbot, history, "Requests Timeout"
        chatbot[-1] = ((chatbot[-1][0], timeout_bot_msg))
        yield chatbot, history, "请求超时"
        raise TimeoutError

    token_counter = 0
@@ -101,8 +104,6 @@ def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt=''
    stream_response = response.iter_lines()
    while True:
        chunk = next(stream_response)
        # print(chunk)
        if chunk == b'data: [DONE]':
            break
@@ -119,16 +120,21 @@ def predict(inputs, top_p, temperature, chatbot=[], history=[], system_prompt=''
                break
        except Exception as e:
            traceback.print_exc()
            print(chunk.decode())

        chunkjson = json.loads(chunk.decode()[6:])
        status_text = f"id: {chunkjson['id']}, finish_reason: {chunkjson['choices'][0]['finish_reason']}"
        partial_words = partial_words + \
            json.loads(chunk.decode()[6:])[
                'choices'][0]["delta"]["content"]
        if token_counter == 0:
            history.append(" " + partial_words)
        else:
            history[-1] = partial_words
        chatbot[-1] = (history[-2], history[-1])
        token_counter += 1
        yield chatbot, history, status_text

        try:
            chunkjson = json.loads(chunk.decode()[6:])
            status_text = f"finish_reason: {chunkjson['choices'][0]['finish_reason']}"
            partial_words = partial_words + json.loads(chunk.decode()[6:])['choices'][0]["delta"]["content"]
            if token_counter == 0:
                history.append(" " + partial_words)
            else:
                history[-1] = partial_words
            chatbot[-1] = (history[-2], history[-1])
            token_counter += 1
            yield chatbot, history, status_text
        except Exception as e:
            traceback.print_exc()
            print(chunk.decode())
            yield chatbot, history, "Json解析不合常规"
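Each streamed line from the OpenAI API is a server-sent event of the form data: {json}, so chunk.decode()[6:] above strips the 6-character "data: " prefix before json.loads. A minimal offline sketch of that parsing step; the byte string below is a hand-written stand-in for what response.iter_lines() yields, not a real API reply:

import json

chunk = b'data: {"id": "chatcmpl-xyz", "choices": [{"delta": {"content": "Hello"}, "finish_reason": null}]}'

if chunk != b'data: [DONE]':
    chunkjson = json.loads(chunk.decode()[6:])             # drop the leading "data: "
    delta = chunkjson['choices'][0]['delta']
    finish_reason = chunkjson['choices'][0]['finish_reason']
    print(delta.get('content', ''), finish_reason)         # -> Hello None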