diff --git a/request_llms/bridge_qwen.py b/request_llms/bridge_qwen_7B.py
similarity index 92%
rename from request_llms/bridge_qwen.py
rename to request_llms/bridge_qwen_7B.py
index 85a4d80..dfe1fc4 100644
--- a/request_llms/bridge_qwen.py
+++ b/request_llms/bridge_qwen_7B.py
@@ -1,4 +1,4 @@
-model_name = "Qwen"
+model_name = "Qwen-7B"
 cmd_to_install = "`pip install -r request_llms/requirements_qwen.txt`"
@@ -30,7 +30,7 @@ class GetQwenLMHandle(LocalLLMHandle):
         from modelscope import AutoModelForCausalLM, AutoTokenizer, GenerationConfig

         with ProxyNetworkActivate('Download_LLM'):
-            model_id = 'qwen/Qwen-7B-Chat'
+            model_id = 'qwen/Qwen-7B-Chat'  # change the path here if you have already downloaded the model; also, don't forget the tokenizer
             self._tokenizer = AutoTokenizer.from_pretrained('Qwen/Qwen-7B-Chat', trust_remote_code=True, resume_download=True)
             # use fp16
             model = AutoModelForCausalLM.from_pretrained(model_id, device_map="auto", trust_remote_code=True, fp16=True).eval()
@@ -51,7 +51,7 @@ class GetQwenLMHandle(LocalLLMHandle):
         query, max_length, top_p, temperature, history = adaptor(kwargs)

-        for response in self._model.chat(self._tokenizer, query, history=history, stream=True):
+        for response in self._model.chat_stream(self._tokenizer, query, history=history):
             yield response

     def try_to_import_special_deps(self, **kwargs):
diff --git a/tests/test_llms.py b/tests/test_llms.py
index 8b68597..bdb622b 100644
--- a/tests/test_llms.py
+++ b/tests/test_llms.py
@@ -16,8 +16,9 @@ if __name__ == "__main__":
     # from request_llms.bridge_jittorllms_llama import predict_no_ui_long_connection
     # from request_llms.bridge_claude import predict_no_ui_long_connection
     # from request_llms.bridge_internlm import predict_no_ui_long_connection
-    from request_llms.bridge_deepseekcoder import predict_no_ui_long_connection
-    # from request_llms.bridge_qwen import predict_no_ui_long_connection
+    # from request_llms.bridge_deepseekcoder import predict_no_ui_long_connection
+    # from request_llms.bridge_qwen_7B import predict_no_ui_long_connection
+    from request_llms.bridge_qwen import predict_no_ui_long_connection
     # from request_llms.bridge_spark import predict_no_ui_long_connection
     # from request_llms.bridge_zhipu import predict_no_ui_long_connection
     # from request_llms.bridge_chatglm3 import predict_no_ui_long_connection