Fix response message bug in bridge_qianfan.py,
bridge_qwen.py, and bridge_skylark2.py
parent 2e9b4a5770
commit e359fff040
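The bug: each bridge streams the reply with `for response in ...generate(...)` and later appends the final `response` to `history`. In bridge_qianfan.py, the post-try "summary" block (`# 总结输出`, "summarize the output") unconditionally reset `response` to the error string `[Local Message] {model_name}响应异常 ...` ("{model_name} response error"), so `history` always recorded the error message instead of the actual reply; `response` was also left undefined whenever the generator yielded nothing. The fix seeds `response` with the waiting placeholder `[Local Message] 等待{model_name}响应中 ...` ("waiting for {model_name}'s response") before each streaming loop and, in bridge_qianfan.py, moves the history update inside the `try` block while deleting the broken summary. (The recurring comment `# 开始接收回复` means "start receiving the reply".)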
bridge_qianfan.py
@@ -146,9 +146,12 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
     yield from update_ui(chatbot=chatbot, history=history)
     # 开始接收回复
     try:
+        response = f"[Local Message] 等待{model_name}响应中 ..."
         for response in generate_from_baidu_qianfan(inputs, llm_kwargs, history, system_prompt):
             chatbot[-1] = (inputs, response)
             yield from update_ui(chatbot=chatbot, history=history)
+        history.extend([inputs, response])
+        yield from update_ui(chatbot=chatbot, history=history)
     except ConnectionAbortedError as e:
         from .bridge_all import model_info
         if len(history) >= 2: history[-1] = ""; history[-2] = "" # 清除当前溢出的输入:history[-2] 是本次输入, history[-1] 是本次输出
@@ -157,10 +160,3 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
         chatbot[-1] = (chatbot[-1][0], "[Local Message] Reduce the length. 本次输入过长, 或历史数据过长. 历史缓存数据已部分释放, 您可以请再次尝试. (若再次失败则更可能是因为输入过长.)")
         yield from update_ui(chatbot=chatbot, history=history, msg="异常") # 刷新界面
         return
-
-    # 总结输出
-    response = f"[Local Message] {model_name}响应异常 ..."
-    if response == f"[Local Message] 等待{model_name}响应中 ...":
-        response = f"[Local Message] {model_name}响应异常 ..."
-    history.extend([inputs, response])
-    yield from update_ui(chatbot=chatbot, history=history)
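A minimal, self-contained sketch of the before/after behavior (illustrative names and strings, not the project's code; stream_reply stands in for generate_from_baidu_qianfan):

    # Sketch of the bug and the fix; names and strings are illustrative.
    def stream_reply(chunks):
        # Stand-in for generate_from_baidu_qianfan / sri.generate:
        # yields progressively longer partial replies.
        yield from chunks

    def predict_fixed(chunks, history):
        # Seed response so it exists even if the generator yields nothing.
        response = "[Local Message] waiting for response ..."
        try:
            for response in stream_reply(chunks):
                pass  # the real code repaints the chat UI here
            # Record the final reply while still inside the try block.
            history.extend(["<inputs>", response])
        except ConnectionAbortedError:
            return history
        return history

    def predict_buggy(chunks, history):
        try:
            for response in stream_reply(chunks):
                pass
        except ConnectionAbortedError:
            return history
        # Old "summary" block: clobbers the real reply unconditionally,
        # so the if below can never be true.
        response = "[Local Message] response error ..."
        if response == "[Local Message] waiting for response ...":
            response = "[Local Message] response error ..."
        history.extend(["<inputs>", response])
        return history

    print(predict_buggy(["Hel", "Hello"], []))  # ['<inputs>', '[Local Message] response error ...']
    print(predict_fixed(["Hel", "Hello"], []))  # ['<inputs>', 'Hello']
    print(predict_fixed([], []))                # ['<inputs>', '[Local Message] waiting for response ...']

Seeding with the waiting placeholder also leaves `history` with a sane value when the stream dies before the first chunk.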
bridge_qwen.py
@@ -51,6 +51,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
     # 开始接收回复
     from .com_qwenapi import QwenRequestInstance
     sri = QwenRequestInstance()
+    response = f"[Local Message] 等待{model_name}响应中 ..."
     for response in sri.generate(inputs, llm_kwargs, history, system_prompt):
         chatbot[-1] = (inputs, response)
         yield from update_ui(chatbot=chatbot, history=history)
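This hunk and the two below are the same one-line guard. It is needed because a `for` loop's target is only bound once the iterable yields something; if `generate()` produces nothing, any later read of `response` raises UnboundLocalError. A tiny illustration (hypothetical demo, not project code):

    def demo():
        for response in iter(()):   # generator that never yields
            pass
        return response             # UnboundLocalError: 'response' is unbound

    demo()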
bridge_skylark2.py
@@ -56,6 +56,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
     # 开始接收回复
     from .com_skylark2api import YUNQUERequestInstance
     sri = YUNQUERequestInstance()
+    response = f"[Local Message] 等待{model_name}响应中 ..."
     for response in sri.generate(inputs, llm_kwargs, history, system_prompt):
         chatbot[-1] = (inputs, response)
         yield from update_ui(chatbot=chatbot, history=history)
Spark bridge (file header lost in extraction; the com_sparkapi import suggests bridge_spark.py, though the commit title names only three files)
@@ -52,6 +52,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_prompt
     # 开始接收回复
     from .com_sparkapi import SparkRequestInstance
     sri = SparkRequestInstance()
+    response = f"[Local Message] 等待{model_name}响应中 ..."
     for response in sri.generate(inputs, llm_kwargs, history, system_prompt, use_image_api=True):
         chatbot[-1] = (inputs, response)
         yield from update_ui(chatbot=chatbot, history=history)