Allow using the Proxy during module warm-up

binary-husky 2023-09-27 15:53:45 +08:00
parent 0844b6e9cf
commit d2d5665c37
2 changed files with 7 additions and 5 deletions


@@ -155,11 +155,13 @@ def auto_update(raise_error=False):
 def warm_up_modules():
     print('正在执行一些模块的预热...')
+    from toolbox import ProxyNetworkActivate
     from request_llm.bridge_all import model_info
-    enc = model_info["gpt-3.5-turbo"]['tokenizer']
-    enc.encode("模块预热", disallowed_special=())
-    enc = model_info["gpt-4"]['tokenizer']
-    enc.encode("模块预热", disallowed_special=())
+    with ProxyNetworkActivate("Warmup_Modules"):
+        enc = model_info["gpt-3.5-turbo"]['tokenizer']
+        enc.encode("模块预热", disallowed_special=())
+        enc = model_info["gpt-4"]['tokenizer']
+        enc.encode("模块预热", disallowed_special=())
 
 if __name__ == '__main__':
     import os
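The warm-up needs network access because the first call to each tokenizer fetches and caches its vocabulary file; the new ProxyNetworkActivate block simply routes that fetch through the configured proxy. Below is a minimal sketch of the same warm-up step, assuming the tokenizers in model_info are tiktoken-backed (the disallowed_special=() argument suggests as much); the model names and the loop are illustrative only, not the repository's code.

```python
# Hedged sketch, not the repo's code: warm a tiktoken encoder so its BPE file
# is downloaded and cached before the first real request. Model names here are
# illustrative; the network is only hit on the very first load of each encoder.
import tiktoken

for model in ("gpt-3.5-turbo", "gpt-4"):
    enc = tiktoken.encoding_for_model(model)      # may download the BPE ranks file
    enc.encode("模块预热", disallowed_special=())   # forces the encoder to be fully loaded
```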


@@ -184,7 +184,7 @@ PATH_PRIVATE_UPLOAD = "private_upload"
 # 日志文件夹的位置,请勿修改
 PATH_LOGGING = "gpt_log"
 # 除了连接OpenAI之外,还有哪些场合允许使用代理,请勿修改
-WHEN_TO_USE_PROXY = ["Download_LLM", "Download_Gradio_Theme", "Connect_Grobid"]
+WHEN_TO_USE_PROXY = ["Download_LLM", "Download_Gradio_Theme", "Connect_Grobid", "Warmup_Modules"]
 
 """