From 5d75c578b998b1f9f0ab67b5fee1961cb239dfe1 Mon Sep 17 00:00:00 2001
From: binary-husky <505030475@qq.com>
Date: Thu, 25 May 2023 15:28:27 +0800
Subject: [PATCH] fix dependency

---
 request_llm/bridge_all.py    | 29 ++++++++++++++------------
 request_llm/edge_gpt_free.py | 40 ++++++++++++++++++------------------
 2 files changed, 36 insertions(+), 33 deletions(-)

diff --git a/request_llm/bridge_all.py b/request_llm/bridge_all.py
index 5c0c8d5..b6efe21 100644
--- a/request_llm/bridge_all.py
+++ b/request_llm/bridge_all.py
@@ -202,19 +202,22 @@ if "stack-claude" in AVAIL_LLM_MODELS:
         }
     })
 if "newbing-free" in AVAIL_LLM_MODELS:
-    from .bridge_newbingfree import predict_no_ui_long_connection as newbingfree_noui
-    from .bridge_newbingfree import predict as newbingfree_ui
-    # claude
-    model_info.update({
-        "newbing-free": {
-            "fn_with_ui": newbingfree_ui,
-            "fn_without_ui": newbingfree_noui,
-            "endpoint": newbing_endpoint,
-            "max_token": 4096,
-            "tokenizer": tokenizer_gpt35,
-            "token_cnt": get_token_num_gpt35,
-        }
-    })
+    try:
+        from .bridge_newbingfree import predict_no_ui_long_connection as newbingfree_noui
+        from .bridge_newbingfree import predict as newbingfree_ui
+        # claude
+        model_info.update({
+            "newbing-free": {
+                "fn_with_ui": newbingfree_ui,
+                "fn_without_ui": newbingfree_noui,
+                "endpoint": newbing_endpoint,
+                "max_token": 4096,
+                "tokenizer": tokenizer_gpt35,
+                "token_cnt": get_token_num_gpt35,
+            }
+        })
+    except:
+        print(trimmed_format_exc())
 
 def LLM_CATCH_EXCEPTION(f):
     """
diff --git a/request_llm/edge_gpt_free.py b/request_llm/edge_gpt_free.py
index 7e893d4..ef61873 100644
--- a/request_llm/edge_gpt_free.py
+++ b/request_llm/edge_gpt_free.py
@@ -196,9 +196,9 @@ class _ChatHubRequest:
         self,
         prompt: str,
         conversation_style: CONVERSATION_STYLE_TYPE,
-        options: list | None = None,
-        webpage_context: str | None = None,
-        search_result: bool = False,
+        options = None,
+        webpage_context = None,
+        search_result = False,
     ) -> None:
         """
         Updates request object
@@ -294,9 +294,9 @@ class _Conversation:
 
     def __init__(
         self,
-        proxy: str | None = None,
-        async_mode: bool = False,
-        cookies: list[dict] | None = None,
+        proxy = None,
+        async_mode = False,
+        cookies = None,
     ) -> None:
         if async_mode:
             return
@@ -350,8 +350,8 @@ class _Conversation:
 
     @staticmethod
     async def create(
-        proxy: str | None = None,
-        cookies: list[dict] | None = None,
+        proxy = None,
+        cookies = None,
     ):
         self = _Conversation(async_mode=True)
         self.struct = {
@@ -418,11 +418,11 @@ class _ChatHub:
     def __init__(
         self,
         conversation: _Conversation,
-        proxy: str = None,
-        cookies: list[dict] | None = None,
+        proxy = None,
+        cookies = None,
     ) -> None:
-        self.session: aiohttp.ClientSession | None = None
-        self.wss: aiohttp.ClientWebSocketResponse | None = None
+        self.session = None
+        self.wss = None
         self.request: _ChatHubRequest
         self.loop: bool
         self.task: asyncio.Task
@@ -441,7 +441,7 @@ class _ChatHub:
         conversation_style: CONVERSATION_STYLE_TYPE = None,
         raw: bool = False,
         options: dict = None,
-        webpage_context: str | None = None,
+        webpage_context = None,
         search_result: bool = False,
     ) -> Generator[str, None, None]:
         """
@@ -611,10 +611,10 @@ class Chatbot:
 
     def __init__(
        self,
-        proxy: str | None = None,
-        cookies: list[dict] | None = None,
+        proxy = None,
+        cookies = None,
     ) -> None:
-        self.proxy: str | None = proxy
+        self.proxy = proxy
         self.chat_hub: _ChatHub = _ChatHub(
             _Conversation(self.proxy, cookies=cookies),
             proxy=self.proxy,
@@ -623,8 +623,8 @@ class Chatbot:
 
     @staticmethod
     async def create(
-        proxy: str | None = None,
-        cookies: list[dict] | None = None,
+        proxy = None,
+        cookies = None,
     ):
         self = Chatbot.__new__(Chatbot)
         self.proxy = proxy
@@ -641,7 +641,7 @@ class Chatbot:
         wss_link: str = "wss://sydney.bing.com/sydney/ChatHub",
         conversation_style: CONVERSATION_STYLE_TYPE = None,
         options: dict = None,
-        webpage_context: str | None = None,
+        webpage_context = None,
         search_result: bool = False,
     ) -> dict:
         """
@@ -667,7 +667,7 @@ class Chatbot:
         conversation_style: CONVERSATION_STYLE_TYPE = None,
         raw: bool = False,
         options: dict = None,
-        webpage_context: str | None = None,
+        webpage_context = None,
         search_result: bool = False,
     ) -> Generator[str, None, None]:
         """