Fix the get_conf interface

binary-husky 2023-10-30 11:10:05 +08:00
parent 527f9d28ad
commit 9a1aff5bb6
5 changed files with 5 additions and 6 deletions
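
All five diffs apply the same pattern: call sites stop unpacking the value returned by get_conf when a single key is requested. Presumably the parent commit changed get_conf itself so that one key yields the value directly instead of a one-element tuple; the sketch below only illustrates that assumed behavior, with a hypothetical in-memory dict standing in for the real config.py / environment lookup.

# Minimal sketch of the assumed get_conf interface change (illustration only;
# the real get_conf reads config.py / environment variables, not this dict).
_DEMO_CONF = {'XFYUN_APPID': '00000000', 'SLACK_CLAUDE_BOT_ID': 'U0XXXXXXX'}  # hypothetical values

def get_conf(*args):
    """Return the value itself for one key, a tuple of values for several keys."""
    res = tuple(_DEMO_CONF[arg] for arg in args)
    return res[0] if len(res) == 1 else res

# Old call sites had to unpack a one-element sequence:
#   XFYUN_APPID, = get_conf('XFYUN_APPID')
#   users = get_conf('SLACK_CLAUDE_BOT_ID')[0]
# New call sites, as in this commit, receive the value directly:
XFYUN_APPID = get_conf('XFYUN_APPID')
users = get_conf('SLACK_CLAUDE_BOT_ID')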

@@ -433,7 +433,7 @@ def main():
         server_port=PORT,
         favicon_path=os.path.join(os.path.dirname(__file__), "docs/logo.png"),
         auth=AUTHENTICATION if len(AUTHENTICATION) != 0 else None,
-        blocked_paths=["config.py","config_private.py","docker-compose.yml","Dockerfile"])
+        blocked_paths=["config.py","config_private.py","docker-compose.yml","Dockerfile","gpt_log/admin"])
     # If you need to run under a secondary URL path
     # CUSTOM_PATH = get_conf('CUSTOM_PATH')

@@ -75,7 +75,7 @@ def generate_message_payload(inputs, llm_kwargs, history, system_prompt):
 def generate_from_baidu_qianfan(inputs, llm_kwargs, history, system_prompt):
-    BAIDU_CLOUD_QIANFAN_MODEL, = get_conf('BAIDU_CLOUD_QIANFAN_MODEL')
+    BAIDU_CLOUD_QIANFAN_MODEL = get_conf('BAIDU_CLOUD_QIANFAN_MODEL')
     url_lib = {
         "ERNIE-Bot": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions" ,

@@ -8,7 +8,7 @@ from multiprocessing import Process, Pipe
 model_name = '星火认知大模型'
 def validate_key():
-    XFYUN_APPID, = get_conf('XFYUN_APPID', )
+    XFYUN_APPID = get_conf('XFYUN_APPID')
     if XFYUN_APPID == '00000000' or XFYUN_APPID == '':
         return False
     return True

@@ -36,7 +36,7 @@ try:
         CHANNEL_ID = None
         async def open_channel(self):
-            response = await self.conversations_open(users=get_conf('SLACK_CLAUDE_BOT_ID')[0])
+            response = await self.conversations_open(users=get_conf('SLACK_CLAUDE_BOT_ID'))
             self.CHANNEL_ID = response["channel"]["id"]
         async def chat(self, text):
@@ -51,7 +51,7 @@ try:
             # TODO: history messages are not supported for now, because multiple users sharing one channel would leak history to each other
             resp = await self.conversations_history(channel=self.CHANNEL_ID, oldest=self.LAST_TS, limit=1)
             msg = [msg for msg in resp["messages"]
-                   if msg.get("user") == get_conf('SLACK_CLAUDE_BOT_ID')[0]]
+                   if msg.get("user") == get_conf('SLACK_CLAUDE_BOT_ID')]
             return msg
         except (SlackApiError, KeyError) as e:
             raise RuntimeError(f"获取Slack消息失败。")

@@ -732,7 +732,6 @@ def select_api_key(keys, llm_model):
         raise RuntimeError(f"您提供的api-key不满足要求不包含任何可用于{llm_model}的api-key。您可能选择了错误的模型或请求源右下角更换模型菜单中可切换openai,azure,claude,api2d等请求源")
     api_key = random.choice(avail_key_list) # random load balancing
-    if ENABLE
     return api_key
 def read_env_variable(arg, default_value):