Merge branch 'master' into frontier
Commit 40a065ce04
README.md (12 lines changed)

@@ -1,6 +1,6 @@
 > **Note**
 >
-> 2023.10.8: Gradio and Pydantic dependencies have been adjusted and `requirements.txt` has been updated. Please **update the code** promptly, and when installing dependencies, strictly use the versions **specified** in `requirements.txt`.
+> 2023.10.28: Several issues were urgently fixed. When installing dependencies, please use the versions **specified** in `requirements.txt`.
 >
 > `pip install -r requirements.txt`
 
@@ -310,6 +310,7 @@ Tip:不指定文件直接点击 `载入对话历史存档` 可以查看历史h
 
 ### II: Versions:
 - version 3.60 (todo): improve the void terminal; introduce the code interpreter and more plugins
+- version 3.56: support dynamically added basic-function buttons; new report-PDF summary page
 - version 3.55: rework the front-end UI; introduce floating windows and a menu bar
 - version 3.54: add a dynamic code interpreter (Code Interpreter) (work in progress)
 - version 3.53: support dynamically switching UI themes; improve stability & resolve multi-user conflicts
@@ -344,7 +345,14 @@ GPT Academic developer QQ group: `610599535`
 1. `Chuanhu-Small-and-Beautiful` [link](https://github.com/GaiZhenbiao/ChuanhuChatGPT/)
 
 
-### IV: References and learning
+### IV: Development branches of this project
 
+1. `master` branch: the main branch, stable release
+
+2. `frontier` branch: the development branch, beta release
+
+
+### V: References and learning
+
 ```
 The code draws on the designs of many other excellent projects, in no particular order:
@@ -49,6 +49,10 @@ THEME = "Default"
 AVAIL_THEMES = ["Default", "Chuanhu-Small-and-Beautiful", "High-Contrast", "Gstaff/Xkcd", "NoCrypt/Miku"]
 
 
+# 默认的系统提示词(system prompt)
+INIT_SYS_PROMPT = "Serve me as a writing and programming assistant."
+
+
 # 对话窗的高度 (仅在LAYOUT="TOP-DOWN"时生效)
 CHATBOT_HEIGHT = 1115
 
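The new `INIT_SYS_PROMPT` entry is an ordinary config value, so it can presumably be customized the same way as the other options above. A minimal sketch, assuming the project's usual pattern of shadowing `config.py` with a private config file (`config_private.py` is the conventional name, not something introduced by this diff):

```
# config_private.py -- hypothetical override file; values here are assumed to
# shadow the defaults in config.py (check the project's README for the exact mechanism).
INIT_SYS_PROMPT = "You are a careful academic writing and programming assistant."
```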
@@ -7,25 +7,25 @@
 ## Method 2 (new method: connect multiple Azure models and switch between them dynamically)
 
 - Building on Method 1, register and obtain several sets of AZURE_ENDPOINT, AZURE_API_KEY, and AZURE_ENGINE
-- Edit the AZURE_CFG_ARRAY option in the config, filling in the configuration of each Azure model in the format shown below:
+- Edit the AZURE_CFG_ARRAY and AVAIL_LLM_MODELS options in the config, filling in the configuration of each Azure model in the format shown below:
 
 ```
 AZURE_CFG_ARRAY = {
-    "azure-gpt-3.5":  # 第一个模型,azure模型必须以"azure-"开头
+    "azure-gpt-3.5":  # 第一个模型,azure模型必须以"azure-"开头,注意您还需要将"azure-gpt-3.5"加入AVAIL_LLM_MODELS(模型下拉菜单)
     {
         "AZURE_ENDPOINT": "https://你亲手写的api名称.openai.azure.com/",
         "AZURE_API_KEY": "cccccccccccccccccccccccccccccccc",
         "AZURE_ENGINE": "填入你亲手写的部署名1",
         "AZURE_MODEL_MAX_TOKEN": 4096,
     },
-    "azure-gpt-4":  # 第二个模型,azure模型必须以"azure-"开头
+    "azure-gpt-4":  # 第二个模型,azure模型必须以"azure-"开头,注意您还需要将"azure-gpt-4"加入AVAIL_LLM_MODELS(模型下拉菜单)
     {
         "AZURE_ENDPOINT": "https://你亲手写的api名称.openai.azure.com/",
         "AZURE_API_KEY": "dddddddddddddddddddddddddddddddd",
         "AZURE_ENGINE": "填入你亲手写的部署名2",
         "AZURE_MODEL_MAX_TOKEN": 8192,
     },
-    "azure-gpt-3.5-16k":  # 第三个模型,azure模型必须以"azure-"开头
+    "azure-gpt-3.5-16k":  # 第三个模型,azure模型必须以"azure-"开头,注意您还需要将"azure-gpt-3.5-16k"加入AVAIL_LLM_MODELS(模型下拉菜单)
     {
         "AZURE_ENDPOINT": "https://你亲手写的api名称.openai.azure.com/",
         "AZURE_API_KEY": "eeeeeeeeeeeeeeeeeeeeeeeeeeeeeeee",
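To make the edited bullet concrete, here is a minimal sketch (not taken from the diff) of the companion `AVAIL_LLM_MODELS` setting; the non-Azure model names shown are illustrative only:

```
# Sketch of the matching config entry -- each AZURE_CFG_ARRAY key must start with
# "azure-" and must also be listed here, or it will not appear in the model dropdown.
AVAIL_LLM_MODELS = ["gpt-3.5-turbo", "gpt-4"]                      # illustrative base list
AVAIL_LLM_MODELS += ["azure-gpt-3.5", "azure-gpt-4", "azure-gpt-3.5-16k"]
```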
main.py (4 lines changed)

@@ -14,13 +14,13 @@ def main():
 CHATBOT_HEIGHT, LAYOUT, AVAIL_LLM_MODELS, AUTO_CLEAR_TXT = get_conf('CHATBOT_HEIGHT', 'LAYOUT', 'AVAIL_LLM_MODELS', 'AUTO_CLEAR_TXT')
 ENABLE_AUDIO, AUTO_CLEAR_TXT, PATH_LOGGING, AVAIL_THEMES, THEME = get_conf('ENABLE_AUDIO', 'AUTO_CLEAR_TXT', 'PATH_LOGGING', 'AVAIL_THEMES', 'THEME')
 DARK_MODE, NUM_CUSTOM_BASIC_BTN, SSL_KEYFILE, SSL_CERTFILE = get_conf('DARK_MODE', 'NUM_CUSTOM_BASIC_BTN', 'SSL_KEYFILE', 'SSL_CERTFILE')
+INIT_SYS_PROMPT, = get_conf('INIT_SYS_PROMPT')
 
 # 如果WEB_PORT是-1, 则随机选取WEB端口
 PORT = find_free_port() if WEB_PORT <= 0 else WEB_PORT
 from check_proxy import get_current_version
 from themes.theme import adjust_theme, advanced_css, theme_declaration, load_dynamic_theme
 
-initial_prompt = "Serve me as a writing and programming assistant."
 title_html = f"<h1 align=\"center\">GPT 学术优化 {get_current_version()}</h1>{theme_declaration}"
 description = "Github源代码开源和更新[地址🚀](https://github.com/binary-husky/gpt_academic), "
 description += "感谢热情的[开发者们❤️](https://github.com/binary-husky/gpt_academic/graphs/contributors)."
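The trailing comma in the added `INIT_SYS_PROMPT, = get_conf('INIT_SYS_PROMPT')` line is what unpacks a one-element result. A standalone sketch of the idea, using a stand-in `get_conf` that is not the project's implementation:

```
# Stand-in get_conf for illustration only: returns a tuple of the requested values,
# mirroring how the calls above unpack several names at once.
def get_conf(*keys):
    cfg = {'INIT_SYS_PROMPT': "Serve me as a writing and programming assistant."}
    return tuple(cfg[k] for k in keys)

INIT_SYS_PROMPT, = get_conf('INIT_SYS_PROMPT')   # the comma unpacks the 1-tuple
print(INIT_SYS_PROMPT)
```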
@@ -153,7 +153,7 @@ def main():
 top_p = gr.Slider(minimum=-0, maximum=1.0, value=1.0, step=0.01, interactive=True, label="Top-p (nucleus sampling)",)
 temperature = gr.Slider(minimum=-0, maximum=2.0, value=1.0, step=0.01, interactive=True, label="Temperature",)
 max_length_sl = gr.Slider(minimum=256, maximum=1024*32, value=4096, step=128, interactive=True, label="Local LLM MaxLength",)
-system_prompt = gr.Textbox(show_label=True, lines=2, placeholder=f"System Prompt", label="System prompt", value=initial_prompt)
+system_prompt = gr.Textbox(show_label=True, lines=2, placeholder=f"System Prompt", label="System prompt", value=INIT_SYS_PROMPT)
 
 with gr.Tab("界面外观", elem_id="interact-panel"):
     theme_dropdown = gr.Dropdown(AVAIL_THEMES, value=THEME, label="更换UI主题").style(container=False)
@@ -521,7 +521,7 @@ if len(AZURE_CFG_ARRAY) > 0:
 }
 })
 if azure_model_name not in AVAIL_LLM_MODELS:
-    azure_model_name += [azure_model_name]
+    AVAIL_LLM_MODELS += [azure_model_name]
 
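The last hunk fixes what looks like a copy-paste bug: the old line appended to the loop variable (a string) rather than to the model list, so Azure entries never reached the dropdown (and `str += list` would raise a `TypeError` anyway). A simplified, self-contained sketch of the intended registration loop; the surrounding registration logic and dict contents are assumptions, not copied from the project:

```
# Simplified sketch of registering Azure models; dict contents are placeholders.
AVAIL_LLM_MODELS = ["gpt-3.5-turbo"]
AZURE_CFG_ARRAY = {
    "azure-gpt-3.5": {"AZURE_ENGINE": "deployment-1"},
    "azure-gpt-4": {"AZURE_ENGINE": "deployment-2"},
}

if len(AZURE_CFG_ARRAY) > 0:
    for azure_model_name, azure_cfg in AZURE_CFG_ARRAY.items():
        # ... endpoint/key/engine registration for this model would go here ...
        if azure_model_name not in AVAIL_LLM_MODELS:
            AVAIL_LLM_MODELS += [azure_model_name]   # the fix: extend the list, not the name

print(AVAIL_LLM_MODELS)   # ['gpt-3.5-turbo', 'azure-gpt-3.5', 'azure-gpt-4']
```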