From 0c8c539e9b6e683aa851cc57798fa6ca30eb0b99 Mon Sep 17 00:00:00 2001
From: QQisQQ <40739351+QQisQQ@users.noreply.github.com>
Date: Wed, 19 Jul 2023 04:39:15 +0800
Subject: [PATCH 1/3] Fix new bing error code 200
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

Adapted from
https://github.com/acheong08/EdgeGPT/pull/610/commits/16e00af9d55e1e32c4389512b02cac1c7eca4672.

This resolves the following error for me:

```
Traceback (most recent call last):
  File "./request_llm/bridge_newbingfree.py", line 152, in run
    asyncio.run(self.async_run())
  File "/root/miniconda3/envs/py311/lib/python3.11/asyncio/runners.py", line 190, in run
    return runner.run(main)
           ^^^^^^^^^^^^^^^^
  File "/root/miniconda3/envs/py311/lib/python3.11/asyncio/runners.py", line 118, in run
    return self._loop.run_until_complete(task)
           ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/root/miniconda3/envs/py311/lib/python3.11/asyncio/base_events.py", line 653, in run_until_complete
    return future.result()
           ^^^^^^^^^^^^^^^
  File "./request_llm/bridge_newbingfree.py", line 98, in async_run
    async for final, response in self.newbing_model.ask_stream(
  File "./request_llm/edge_gpt_free.py", line 676, in ask_stream
    async for response in self.chat_hub.ask_stream(
  File "./request_llm/edge_gpt_free.py", line 456, in ask_stream
    self.wss = await self.session.ws_connect(
               ^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
  File "/root/miniconda3/envs/py311/lib/python3.11/site-packages/aiohttp/client.py", line 795, in _ws_connect
    raise WSServerHandshakeError(
aiohttp.client_exceptions.WSServerHandshakeError: 200, message='Invalid response status', url=URL('wss://sydney.bing.com/sydney/ChatHub')
```

A short illustrative sketch of the resulting Cookie-header construction is
included after the last patch in this series.
---
 request_llm/edge_gpt_free.py | 13 +++++++++++--
 1 file changed, 11 insertions(+), 2 deletions(-)

diff --git a/request_llm/edge_gpt_free.py b/request_llm/edge_gpt_free.py
index ef61873..1e96df0 100644
--- a/request_llm/edge_gpt_free.py
+++ b/request_llm/edge_gpt_free.py
@@ -447,6 +447,15 @@ class _ChatHub:
         """
         Ask a question to the bot
         """
+        req_header = HEADERS
+        if self.cookies is not None:
+            ws_cookies = []
+            for cookie in self.cookies:
+                ws_cookies.append(f"{cookie['name']}={cookie['value']}")
+            req_header.update({
+                'Cookie': ';'.join(ws_cookies),
+            })
+
         timeout = aiohttp.ClientTimeout(total=30)
         self.session = aiohttp.ClientSession(timeout=timeout)
 
@@ -455,7 +464,7 @@ class _ChatHub:
         # Check if websocket is closed
         self.wss = await self.session.ws_connect(
             wss_link,
-            headers=HEADERS,
+            headers=req_header,
             ssl=ssl_context,
             proxy=self.proxy,
             autoping=False,
@@ -1109,4 +1118,4 @@ class ImageQuery(Query):
 
 
 if __name__ == "__main__":
-    main()
\ No newline at end of file
+    main()

From 3b88e00cfbf2c9bcfbbb63da4bb26efefd1b2ec8 Mon Sep 17 00:00:00 2001
From: doujiang-zheng
Date: Wed, 19 Jul 2023 09:43:59 +0800
Subject: [PATCH 2/3] Add timestamp for chat_secrets.log and disable the
 verbose httpx log.
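
For reference, a minimal standalone sketch of the logging behaviour this
change produces (an editor's illustration rather than repository code; it
assumes only the Python standard library, and that the httpx package emits
its request logs under the logger name "httpx", as the comment in the diff
below states):

```
# Same handler configuration as the patched main.py, shown in isolation.
import logging, os

os.makedirs("gpt_log", exist_ok=True)
logging.basicConfig(
    filename="gpt_log/chat_secrets.log",
    level=logging.INFO,
    format="%(asctime)s %(levelname)-8s %(message)s",
    datefmt="%Y-%m-%d %H:%M:%S",
)
# Raise the httpx logger above INFO so its per-request lines stay out of the file.
logging.getLogger("httpx").setLevel(logging.WARNING)

logging.info("chat record example")
# gpt_log/chat_secrets.log now ends with a timestamped line such as:
# 2023-07-19 09:43:59 INFO     chat record example
```

Each record now carries a timestamp, and httpx's INFO-level request messages
no longer flood chat_secrets.log.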
---
 main.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/main.py b/main.py
index f5bb9af..0f8ea07 100644
--- a/main.py
+++ b/main.py
@@ -22,8 +22,10 @@ def main():
     # 问询记录, python 版本建议3.9+(越新越好)
     import logging, uuid
     os.makedirs("gpt_log", exist_ok=True)
-    try:logging.basicConfig(filename="gpt_log/chat_secrets.log", level=logging.INFO, encoding="utf-8")
-    except:logging.basicConfig(filename="gpt_log/chat_secrets.log", level=logging.INFO)
+    try:logging.basicConfig(filename="gpt_log/chat_secrets.log", level=logging.INFO, encoding="utf-8", format="%(asctime)s %(levelname)-8s %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
+    except:logging.basicConfig(filename="gpt_log/chat_secrets.log", level=logging.INFO, format="%(asctime)s %(levelname)-8s %(message)s", datefmt="%Y-%m-%d %H:%M:%S")
+    # Disable logging output from the 'httpx' logger
+    logging.getLogger("httpx").setLevel(logging.WARNING)
     print("所有问询记录将自动保存在本地目录./gpt_log/chat_secrets.log, 请注意自我隐私保护哦!")
 
     # 一些普通功能模块

From b8d77557b0416945ed8a2bd43697b3236f60046d Mon Sep 17 00:00:00 2001
From: binary-husky <96192199+binary-husky@users.noreply.github.com>
Date: Thu, 20 Jul 2023 10:12:42 +0800
Subject: [PATCH 3/3] Update README.md

---
 README.md | 2 +-
 1 file changed, 1 insertion(+), 1 deletion(-)

diff --git a/README.md b/README.md
index 762b21b..386d2d0 100644
--- a/README.md
+++ b/README.md
@@ -44,7 +44,7 @@ chat分析报告生成 | [函数插件] 运行后自动生成总结汇报
 Latex论文一键校对 | [函数插件] 仿Grammarly对Latex文章进行语法、拼写纠错+输出对照PDF
 [谷歌学术统合小助手](https://www.bilibili.com/video/BV19L411U7ia) | [函数插件] 给定任意谷歌学术搜索页面URL,让gpt帮你[写relatedworks](https://www.bilibili.com/video/BV1GP411U7Az/)
 互联网信息聚合+GPT | [函数插件] 一键[让GPT从互联网获取信息](https://www.bilibili.com/video/BV1om4y127ck)回答问题,让信息永不过时
-⭐Arxiv论文精细翻译 | [函数插件] 一键[以超高质量翻译arxiv论文](https://www.bilibili.com/video/BV1dz4y1v77A/),目前最好的论文翻译工具
+⭐Arxiv论文精细翻译 ([Docker](https://github.com/binary-husky/gpt_academic/pkgs/container/gpt_academic_with_latex)) | [函数插件] 一键[以超高质量翻译arxiv论文](https://www.bilibili.com/video/BV1dz4y1v77A/),目前最好的论文翻译工具
 ⭐[实时语音对话输入](https://github.com/binary-husky/gpt_academic/blob/master/docs/use_audio.md) | [函数插件] 异步[监听音频](https://www.bilibili.com/video/BV1AV4y187Uy/),自动断句,自动寻找回答时机
 公式/图片/表格显示 | 可以同时显示公式的[tex形式和渲染形式](https://user-images.githubusercontent.com/96192199/230598842-1d7fcddd-815d-40ee-af60-baf488a199df.png),支持公式、代码高亮
 多线程函数插件支持 | 支持多线调用chatgpt,一键处理[海量文本](https://www.bilibili.com/video/BV1FT411H7c5/)或程序
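
A closing note on PATCH 1/3: the fix forwards the stored account cookies to
the websocket handshake by joining them into a single `Cookie` request
header. Below is a minimal sketch of that construction (an editor's
illustration, not part of any patch above; the list-of-dicts cookie shape
with "name"/"value" keys mirrors the format used in the diff, and the cookie
names and values are placeholders):

```
# Stand-in for the module-level HEADERS dict in edge_gpt_free.py.
req_header = {"User-Agent": "Mozilla/5.0"}

# Cookies as loaded from a browser-exported cookie file: a list of dicts.
cookies = [
    {"name": "_U", "value": "<placeholder>"},
    {"name": "MUID", "value": "<placeholder>"},
]

if cookies is not None:
    ws_cookies = [f"{cookie['name']}={cookie['value']}" for cookie in cookies]
    req_header["Cookie"] = ";".join(ws_cookies)

print(req_header["Cookie"])  # -> _U=<placeholder>;MUID=<placeholder>
```

Passing these headers to `session.ws_connect(...)` is what, per the patch
author, resolves the `WSServerHandshakeError: 200, message='Invalid response
status'` shown in PATCH 1/3.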