From e594e1b928aadb36d291184bca1deee8601621a8 Mon Sep 17 00:00:00 2001
From: binary-husky
Date: Thu, 18 Jan 2024 00:31:53 +0800
Subject: [PATCH 1/4] update zhipu

---
 config.py                    | 4 ++--
 request_llms/com_zhipuapi.py | 3 ++-
 2 files changed, 4 insertions(+), 3 deletions(-)

diff --git a/config.py b/config.py
index 0bdba13..c11adb2 100644
--- a/config.py
+++ b/config.py
@@ -90,9 +90,9 @@ LLM_MODEL = "gpt-3.5-turbo" # 可选 ↓↓↓
 AVAIL_LLM_MODELS = ["gpt-3.5-turbo-1106","gpt-4-1106-preview","gpt-4-vision-preview",
                     "gpt-3.5-turbo-16k", "gpt-3.5-turbo", "azure-gpt-3.5",
                     "gpt-4", "gpt-4-32k", "azure-gpt-4", "api2d-gpt-4",
-                    "gemini-pro", "chatglm3", "moss", "claude-2"]
+                    "gemini-pro", "chatglm3", "claude-2", "zhipuai"]
 # P.S. 其他可用的模型还包括 [
-#     "qwen-turbo", "qwen-plus", "qwen-max"
+#     "moss", "qwen-turbo", "qwen-plus", "qwen-max"
 #     "zhipuai", "qianfan", "deepseekcoder", "llama2", "qwen-local", "gpt-3.5-turbo-0613",
 #     "gpt-3.5-turbo-16k-0613", "gpt-3.5-random", "api2d-gpt-3.5-turbo", 'api2d-gpt-3.5-turbo-16k',
 #     "spark", "sparkv2", "sparkv3", "chatglm_onnx", "claude-1-100k", "claude-2", "internlm", "jittorllms_pangualpha", "jittorllms_llama"
diff --git a/request_llms/com_zhipuapi.py b/request_llms/com_zhipuapi.py
index ba826d2..f2b4874 100644
--- a/request_llms/com_zhipuapi.py
+++ b/request_llms/com_zhipuapi.py
@@ -37,7 +37,8 @@ class ZhipuRequestInstance():
                 break
             else:
                 raise RuntimeError("Unknown error:" + str(event))
-
+        if self.result_buf == "":
+            yield "智谱没有返回任何数据, 请检查ZHIPUAI_API_KEY和ZHIPUAI_MODEL是否填写正确."
         logging.info(f'[raw_input] {inputs}')
         logging.info(f'[response] {self.result_buf}')
         return self.result_buf

From 94fc396eb97cedb175d104b20ea8a1697beeed65 Mon Sep 17 00:00:00 2001
From: binary-husky
Date: Thu, 18 Jan 2024 15:32:17 +0800
Subject: [PATCH 2/4] Fix translation task name in core_functional.py

---
 core_functional.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/core_functional.py b/core_functional.py
index e19e82e..63648f4 100644
--- a/core_functional.py
+++ b/core_functional.py
@@ -50,7 +50,7 @@ def get_core_functions():
             "Prefix": r"Please translate following sentence to English:" + "\n\n",
             "Suffix": r"",
         },
-        "学术中英互译": {
+        "学术英中互译": {
             "Prefix": r"I want you to act as a scientific English-Chinese translator, " +
                       r"I will provide you with some paragraphs in one language " +
                       r"and your task is to accurately and academically translate the paragraphs only into the other language. " +

From dcc9326f0bf14ec2a68fc4b4338db2f70acdbf34 Mon Sep 17 00:00:00 2001
From: binary-husky
Date: Thu, 18 Jan 2024 17:51:20 +0800
Subject: [PATCH 3/4] zhipuai version problem

---
 request_llms/bridge_zhipu.py | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/request_llms/bridge_zhipu.py b/request_llms/bridge_zhipu.py
index 915a13e..91903ad 100644
--- a/request_llms/bridge_zhipu.py
+++ b/request_llms/bridge_zhipu.py
@@ -42,7 +42,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     try:
         check_packages(["zhipuai"])
     except:
-        yield from update_ui_lastest_msg(f"导入软件依赖失败。使用该模型需要额外依赖,安装方法```pip install --upgrade zhipuai```。",
+        yield from update_ui_lastest_msg(f"导入软件依赖失败。使用该模型需要额外依赖,安装方法```pip install zhipuai==1.0.7```。",
                                          chatbot=chatbot, history=history, delay=0)
         return

From b8ebefa42785dfb05690f7ad680894cecca2e434 Mon Sep 17 00:00:00 2001
From: XIao <46100050+Kilig947@users.noreply.github.com>
Date: Thu, 18 Jan 2024 18:06:07 +0800
Subject: [PATCH 4/4] Google gemini fix (#1473)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* 适配 google gemini 优化为从用户input中提取文件

* Update README.md (#1477)

* Update README.md

* Update README.md

* Update requirements.txt (#1480)

* welcome glm4 from 智谱!

* Update README.md (#1484)

* Update README.md (#1485)

* update zhipu

* Fix translation task name in core_functional.py

* zhipuai version problem

---------

Co-authored-by: binary-husky <96192199+binary-husky@users.noreply.github.com>
Co-authored-by: binary-husky
---
 request_llms/bridge_google_gemini.py | 7 ++++++-
 1 file changed, 6 insertions(+), 1 deletion(-)

diff --git a/request_llms/bridge_google_gemini.py b/request_llms/bridge_google_gemini.py
index 49d8211..48e5419 100644
--- a/request_llms/bridge_google_gemini.py
+++ b/request_llms/bridge_google_gemini.py
@@ -19,7 +19,7 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history=[], sys_prompt="",
     # 检查API_KEY
     if get_conf("GEMINI_API_KEY") == "":
         raise ValueError(f"请配置 GEMINI_API_KEY。")
-
+
     genai = GoogleChatInit()
     watch_dog_patience = 5  # 看门狗的耐心, 设置5秒即可
     gpt_replying_buffer = ''
@@ -50,6 +50,11 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
         yield from update_ui_lastest_msg(f"请配置 GEMINI_API_KEY。", chatbot=chatbot, history=history, delay=0)
         return

+    # 适配润色区域
+    if additional_fn is not None:
+        from core_functional import handle_core_functionality
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)
+
     if "vision" in llm_kwargs["llm_model"]:
         have_recent_file, image_paths = have_any_recent_upload_image_files(chatbot)
         def make_media_input(inputs, image_paths):