From 216d4374e7ce9eb84a6a33c24c8b826b69785478 Mon Sep 17 00:00:00 2001
From: binary-husky
Date: Mon, 1 Apr 2024 00:11:32 +0800
Subject: [PATCH] fix color list overflow

---
 request_llms/bridge_all.py | 6 ++++--
 1 file changed, 4 insertions(+), 2 deletions(-)

diff --git a/request_llms/bridge_all.py b/request_llms/bridge_all.py
index 31749f1..deee1c7 100644
--- a/request_llms/bridge_all.py
+++ b/request_llms/bridge_all.py
@@ -854,7 +854,8 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history, sys_prompt, obser
                 # 观察窗(window)
                 chat_string = []
                 for i in range(n_model):
-                    chat_string.append( f"【{str(models[i])} 说】: <font color=\"{colors[i]}\"> {window_mutex[i][0]} </font>" )
+                    color = colors[i%len(colors)]
+                    chat_string.append( f"【{str(models[i])} 说】: <font color=\"{color}\"> {window_mutex[i][0]} </font>" )
                 res = '<br/><br/>\n\n---\n\n'.join(chat_string)
                 # # # # # # # # # # #
                 observe_window[0] = res
@@ -871,7 +872,8 @@ def predict_no_ui_long_connection(inputs, llm_kwargs, history, sys_prompt, obser
             time.sleep(1)

     for i, future in enumerate(futures):  # wait and get
-        return_string_collect.append( f"【{str(models[i])} 说】: <font color=\"{colors[i]}\"> {future.result()} </font>" )
+        color = colors[i%len(colors)]
+        return_string_collect.append( f"【{str(models[i])} 说】: <font color=\"{color}\"> {future.result()} </font>" )

     window_mutex[-1] = False # stop mutex thread
     res = '<br/><br/>\n\n---\n\n'.join(return_string_collect)