From 2d8f37baba454b6884dd9fa84cbae38220f5f5e5 Mon Sep 17 00:00:00 2001
From: binary-husky
Date: Sat, 23 Sep 2023 22:43:15 +0800
Subject: [PATCH] Refine proxy usage scenarios
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

---
 config.py                          |  3 +++
 crazy_functions/Langchain知识库.py |  4 ++--
 crazy_functions/crazy_utils.py     |  2 +-
 request_llm/bridge_chatglm.py      | 21 +++++++++++----------
 request_llm/bridge_llama2.py       |  2 +-
 themes/gradios.py                  |  4 ++--
 toolbox.py                         | 12 ++++++++++++
 7 files changed, 32 insertions(+), 16 deletions(-)

diff --git a/config.py b/config.py
index 4b77bf9..a3bb74c 100644
--- a/config.py
+++ b/config.py
@@ -183,6 +183,9 @@ ALLOW_RESET_CONFIG = False
 PATH_PRIVATE_UPLOAD = "private_upload"
 # Location of the log folder; do not modify
 PATH_LOGGING = "gpt_log"
+# Scenarios, besides connecting to OpenAI, in which the proxy may be used; do not modify
+WHEN_TO_USE_PROXY = ["Download_LLM", "Download_Gradio_Theme"]
+
 
 """
 Diagram of the relationships between the online LLM configuration options
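
The config.py hunk above introduces a per-scenario proxy whitelist. As a minimal
sketch (not part of the patch; 'Some_Other_Task' is a hypothetical, unlisted
name), membership in WHEN_TO_USE_PROXY is what decides whether the proxy gets
activated, read via the same tuple-unpacking call to get_conf that the patch
itself uses:

    from toolbox import get_conf

    # Mirrors the membership test added to toolbox.py at the end of this patch
    WHEN_TO_USE_PROXY, = get_conf('WHEN_TO_USE_PROXY')
    print('Download_LLM' in WHEN_TO_USE_PROXY)       # True  -> proxy will be activated
    print('Some_Other_Task' in WHEN_TO_USE_PROXY)    # False -> connection stays direct
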
diff --git a/crazy_functions/Langchain知识库.py b/crazy_functions/Langchain知识库.py
index 741a3d0..8433895 100644
--- a/crazy_functions/Langchain知识库.py
+++ b/crazy_functions/Langchain知识库.py
@@ -53,14 +53,14 @@ def 知识库问答(txt, llm_kwargs, plugin_kwargs, chatbot, history, system_pro
     yield from update_ui(chatbot=chatbot, history=history) # refresh the UI
     print('Checking Text2vec ...')
     from langchain.embeddings.huggingface import HuggingFaceEmbeddings
-    with ProxyNetworkActivate():    # temporarily activate the proxy network
+    with ProxyNetworkActivate('Download_LLM'):    # temporarily activate the proxy network
         HuggingFaceEmbeddings(model_name="GanymedeNil/text2vec-large-chinese")
 
     # < ------------------- Build the knowledge base --------------- >
     chatbot.append(['<br/>'.join(file_manifest), "Building the knowledge base..."])
     yield from update_ui(chatbot=chatbot, history=history) # refresh the UI
     print('Establishing knowledge archive ...')
-    with ProxyNetworkActivate():    # temporarily activate the proxy network
+    with ProxyNetworkActivate('Download_LLM'):    # temporarily activate the proxy network
         kai = knowledge_archive_interface()
         kai.feed_archive(file_manifest=file_manifest, id=kai_id)
         kai_files = kai.get_loaded_file()
diff --git a/crazy_functions/crazy_utils.py b/crazy_functions/crazy_utils.py
index 4bdd1fd..b7a1819 100644
--- a/crazy_functions/crazy_utils.py
+++ b/crazy_functions/crazy_utils.py
@@ -651,7 +651,7 @@ class knowledge_archive_interface():
             from toolbox import ProxyNetworkActivate
             print('Checking Text2vec ...')
             from langchain.embeddings.huggingface import HuggingFaceEmbeddings
-            with ProxyNetworkActivate():    # temporarily activate the proxy network
+            with ProxyNetworkActivate('Download_LLM'):    # temporarily activate the proxy network
                 self.text2vec_large_chinese = HuggingFaceEmbeddings(model_name="GanymedeNil/text2vec-large-chinese")
 
         return self.text2vec_large_chinese
diff --git a/request_llm/bridge_chatglm.py b/request_llm/bridge_chatglm.py
index 6dac863..387b3e2 100644
--- a/request_llm/bridge_chatglm.py
+++ b/request_llm/bridge_chatglm.py
@@ -3,7 +3,7 @@ from transformers import AutoModel, AutoTokenizer
 import time
 import threading
 import importlib
-from toolbox import update_ui, get_conf
+from toolbox import update_ui, get_conf, ProxyNetworkActivate
 from multiprocessing import Process, Pipe
 
 load_message = "ChatGLM has not been loaded yet; loading takes some time. Note that, depending on the settings in `config.py`, ChatGLM consumes a large amount of memory (CPU) or VRAM (GPU), which may freeze a low-spec machine ..."
@@ -48,16 +48,17 @@ class GetGLMHandle(Process):
 
         while True:
             try:
-                if self.chatglm_model is None:
-                    self.chatglm_tokenizer = AutoTokenizer.from_pretrained(_model_name_, trust_remote_code=True)
-                    if device=='cpu':
-                        self.chatglm_model = AutoModel.from_pretrained(_model_name_, trust_remote_code=True).float()
+                with ProxyNetworkActivate('Download_LLM'):
+                    if self.chatglm_model is None:
+                        self.chatglm_tokenizer = AutoTokenizer.from_pretrained(_model_name_, trust_remote_code=True)
+                        if device=='cpu':
+                            self.chatglm_model = AutoModel.from_pretrained(_model_name_, trust_remote_code=True).float()
+                        else:
+                            self.chatglm_model = AutoModel.from_pretrained(_model_name_, trust_remote_code=True).half().cuda()
+                        self.chatglm_model = self.chatglm_model.eval()
+                        break
                     else:
-                        self.chatglm_model = AutoModel.from_pretrained(_model_name_, trust_remote_code=True).half().cuda()
-                    self.chatglm_model = self.chatglm_model.eval()
-                    break
-                else:
-                    break
+                        break
             except:
                 retry += 1
                 if retry > 3:
diff --git a/request_llm/bridge_llama2.py b/request_llm/bridge_llama2.py
index e236c94..d1be446 100644
--- a/request_llm/bridge_llama2.py
+++ b/request_llm/bridge_llama2.py
@@ -30,7 +30,7 @@ class GetONNXGLMHandle(LocalLLMHandle):
         with open(os.path.expanduser('~/.cache/huggingface/token'), 'w') as f:
             f.write(huggingface_token)
         model_id = 'meta-llama/Llama-2-7b-chat-hf'
-        with ProxyNetworkActivate():
+        with ProxyNetworkActivate('Download_LLM'):
             self._tokenizer = AutoTokenizer.from_pretrained(model_id, use_auth_token=huggingface_token)
             # use fp16
             model = AutoModelForCausalLM.from_pretrained(model_id, use_auth_token=huggingface_token).eval()
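
The bridge_chatglm.py hunk wraps the whole lazy model load in the new context
manager, so the tokenizer download, the weight download, and the break out of
the retry loop all sit inside the proxied region. A standalone sketch of that
pattern (the model name is illustrative; assumes transformers is installed and
toolbox is importable):

    from transformers import AutoTokenizer
    from toolbox import ProxyNetworkActivate

    retry = 0
    while True:
        try:
            with ProxyNetworkActivate('Download_LLM'):  # proxy only during the fetch
                tokenizer = AutoTokenizer.from_pretrained('THUDM/chatglm2-6b', trust_remote_code=True)
            break
        except Exception:
            retry += 1
            if retry > 3:
                raise RuntimeError('Downloading the tokenizer failed after 3 retries')
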
diff --git a/themes/gradios.py b/themes/gradios.py
index acabf75..6a34e88 100644
--- a/themes/gradios.py
+++ b/themes/gradios.py
@@ -5,7 +5,7 @@ CODE_HIGHLIGHT, ADD_WAIFU, LAYOUT = get_conf('CODE_HIGHLIGHT', 'ADD_WAIFU', 'LAYOUT')
 
 def dynamic_set_theme(THEME):
     set_theme = gr.themes.ThemeClass()
-    with ProxyNetworkActivate():
+    with ProxyNetworkActivate('Download_Gradio_Theme'):
         logging.info('Downloading the Gradio theme, please wait.')
         if THEME.startswith('Huggingface-'): THEME = THEME.lstrip('Huggingface-')
         if THEME.startswith('huggingface-'): THEME = THEME.lstrip('huggingface-')
@@ -16,7 +16,7 @@ def adjust_theme():
 
     try:
         set_theme = gr.themes.ThemeClass()
-        with ProxyNetworkActivate():
+        with ProxyNetworkActivate('Download_Gradio_Theme'):
             logging.info('Downloading the Gradio theme, please wait.')
             THEME, = get_conf('THEME')
             if THEME.startswith('Huggingface-'): THEME = THEME.lstrip('Huggingface-')
diff --git a/toolbox.py b/toolbox.py
index 2ac1ef6..04853bc 100644
--- a/toolbox.py
+++ b/toolbox.py
@@ -956,7 +956,19 @@ class ProxyNetworkActivate():
     """
     This code defines an empty context manager named TempProxy, used to route a short section of code through the proxy
     """
+    def __init__(self, task=None) -> None:
+        self.task = task
+        if not task:
+            # No task given, so the proxy takes effect by default
+            self.valid = True
+        else:
+            # A task was given; check it against the whitelist
+            from toolbox import get_conf
+            WHEN_TO_USE_PROXY, = get_conf('WHEN_TO_USE_PROXY')
+            self.valid = (task in WHEN_TO_USE_PROXY)
+
     def __enter__(self):
+        if not self.valid: return self
         from toolbox import get_conf
         proxies, = get_conf('proxies')
         if 'no_proxy' in os.environ: os.environ.pop('no_proxy')
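
With the toolbox.py hunk above, ProxyNetworkActivate becomes a no-op for any
task that is not whitelisted in WHEN_TO_USE_PROXY, while callers that pass no
task at all keep the old always-on behavior. A hedged usage sketch
('Some_Unlisted_Task' is hypothetical and absent from the default whitelist):

    from toolbox import ProxyNetworkActivate

    with ProxyNetworkActivate('Download_Gradio_Theme'):
        ...  # whitelisted in config.py, so __enter__ activates the configured proxies

    with ProxyNetworkActivate('Some_Unlisted_Task'):
        ...  # not whitelisted, so __enter__ returns immediately and no proxy is set

    with ProxyNetworkActivate():
        ...  # no task given; proxy stays active by default

Keeping the no-argument default permissive means the one-argument change at
each existing call site is the only migration this patch needs.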