diff --git a/request_llms/bridge_all.py b/request_llms/bridge_all.py
index 31749f1..deee1c7 100644
--- a/request_llms/bridge_all.py
+++ b/request_llms/bridge_all.py
@@ -854,7 +854,8 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history, sys_prompt, obser
# 观察窗(window)
chat_string = []
for i in range(n_model):
- chat_string.append( f"【{str(models[i])} 说】: {window_mutex[i][0]} " )
+ color = colors[i%len(colors)]
+ chat_string.append( f"【{str(models[i])} 说】: {window_mutex[i][0]} " )
res = '\n\n---\n\n'.join(chat_string)
# # # # # # # # # # #
observe_window[0] = res
@@ -871,7 +872,8 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history, sys_prompt, obser
time.sleep(1)
for i, future in enumerate(futures): # wait and get
- return_string_collect.append( f"【{str(models[i])} 说】: {future.result()} " )
+ color = colors[i%len(colors)]
+ return_string_collect.append( f"【{str(models[i])} 说】: {future.result()} " )
window_mutex[-1] = False # stop mutex thread
res = '\n\n---\n\n'.join(return_string_collect)