diff --git a/check_proxy.py b/check_proxy.py
index b6fe99f..740eed2 100644
--- a/check_proxy.py
+++ b/check_proxy.py
@@ -155,11 +155,13 @@ def auto_update(raise_error=False):
 
 def warm_up_modules():
     print('正在执行一些模块的预热...')
+    from toolbox import ProxyNetworkActivate
     from request_llm.bridge_all import model_info
-    enc = model_info["gpt-3.5-turbo"]['tokenizer']
-    enc.encode("模块预热", disallowed_special=())
-    enc = model_info["gpt-4"]['tokenizer']
-    enc.encode("模块预热", disallowed_special=())
+    with ProxyNetworkActivate("Warmup_Modules"):
+        enc = model_info["gpt-3.5-turbo"]['tokenizer']
+        enc.encode("模块预热", disallowed_special=())
+        enc = model_info["gpt-4"]['tokenizer']
+        enc.encode("模块预热", disallowed_special=())
 
 if __name__ == '__main__':
     import os
diff --git a/config.py b/config.py
index e4b23a1..34f5b0d 100644
--- a/config.py
+++ b/config.py
@@ -184,7 +184,7 @@ PATH_PRIVATE_UPLOAD = "private_upload"
 # 日志文件夹的位置,请勿修改
 PATH_LOGGING = "gpt_log"
 # 除了连接OpenAI之外,还有哪些场合允许使用代理,请勿修改
-WHEN_TO_USE_PROXY = ["Download_LLM", "Download_Gradio_Theme", "Connect_Grobid"]
+WHEN_TO_USE_PROXY = ["Download_LLM", "Download_Gradio_Theme", "Connect_Grobid", "Warmup_Modules"]
 
 
 """
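
The patch assumes that `toolbox.ProxyNetworkActivate` is a context manager which applies the configured proxy only while its tag (here `"Warmup_Modules"`) is listed in `WHEN_TO_USE_PROXY`, which is why both files change together. `toolbox.py` is not part of this diff, so the sketch below only illustrates that assumed contract; the function name `proxy_network_activate`, the module-level `WHEN_TO_USE_PROXY`/`PROXIES` constants, and the environment-variable handling are stand-ins, not the project's actual implementation.

```python
import os
from contextlib import contextmanager

# Stand-ins for values the real project reads from config.py.
WHEN_TO_USE_PROXY = ["Download_LLM", "Download_Gradio_Theme", "Connect_Grobid", "Warmup_Modules"]
PROXIES = {"http": "http://localhost:7890", "https": "http://localhost:7890"}

@contextmanager
def proxy_network_activate(task: str):
    """Apply proxy env vars only while `task` is whitelisted in WHEN_TO_USE_PROXY."""
    saved = {k: os.environ.get(k) for k in ("HTTP_PROXY", "HTTPS_PROXY")}
    if task in WHEN_TO_USE_PROXY:
        os.environ["HTTP_PROXY"] = PROXIES["http"]
        os.environ["HTTPS_PROXY"] = PROXIES["https"]
    try:
        yield
    finally:
        # Restore whatever proxy settings were in place before the block.
        for key, value in saved.items():
            if value is None:
                os.environ.pop(key, None)
            else:
                os.environ[key] = value

# Usage mirroring the patched warm_up_modules(): the tokenizer downloads run
# inside the proxied block because "Warmup_Modules" is whitelisted above.
with proxy_network_activate("Warmup_Modules"):
    pass  # e.g. fetch tokenizer files here
```

Under that reading, adding `"Warmup_Modules"` to `WHEN_TO_USE_PROXY` is what actually enables the proxy during warm-up; wrapping the code in the context manager alone would be a no-op if the tag were missing from the whitelist.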