优化提示 (Optimize prompt messages)
This commit is contained in:
parent 9c2a6bc413
commit 3041858e7f
@@ -611,7 +611,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
         单线程方法
         函数的说明请见 request_llm/bridge_all.py
     """
-    chatbot.append((inputs, "[Local Message]: 等待Bing响应 ..."))
+    chatbot.append((inputs, "[Local Message]: 等待NewBing响应中 ..."))
 
     global newbing_handle
     if (newbing_handle is None) or (not newbing_handle.success):
@@ -633,7 +633,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     for i in range(len(history)//2):
         history_feedin.append([history[2*i], history[2*i+1]] )
 
-    chatbot[-1] = (inputs, "[Local Message]: 等待Bing响应 ...")
+    chatbot[-1] = (inputs, "[Local Message]: 等待NewBing响应中 ...")
     yield from update_ui(chatbot=chatbot, history=history, msg="NewBing响应缓慢,尚未完成全部响应,请耐心完成后再提交新问题。")
     for response in newbing_handle.stream_chat(query=inputs, history=history_feedin, system_prompt=system_prompt, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
         chatbot[-1] = (inputs, preprocess_newbing_out(response))
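For context, the two reworded strings are the waiting placeholders in the plugin's streaming loop: predict() first appends a placeholder reply to the chatbot, then repeatedly overwrites chatbot[-1] as the NewBing handle streams partial answers. The sketch below illustrates that pattern under stated assumptions; FakeHandle, fake_update_ui and predict_sketch are hypothetical stand-ins for the project's newbing_handle, update_ui and predict, not the actual request_llm/bridge_all.py API.

def fake_update_ui(chatbot, history, msg=""):
    # Stand-in for update_ui(): the real helper yields a refreshed Gradio state;
    # here we just print the current status and the last chatbot message.
    print(f"[UI refresh] status: {msg} | last reply: {chatbot[-1][1]}")

class FakeHandle:
    # Stand-in for the NewBing worker handle assumed by the diff.
    def stream_chat(self, query, history, system_prompt, max_length, top_p, temperature):
        # Yield progressively longer partial answers, like a streaming backend.
        partial = ""
        for word in ["NewBing", "is", "responding", "..."]:
            partial += word + " "
            yield partial

def predict_sketch(inputs, chatbot, history):
    # 1) Show the waiting placeholder immediately (the string this commit reworded).
    chatbot.append((inputs, "[Local Message]: 等待NewBing响应中 ..."))
    fake_update_ui(chatbot, history, msg="waiting for NewBing")

    # 2) Overwrite the placeholder with each streamed partial response.
    handle = FakeHandle()
    for response in handle.stream_chat(query=inputs, history=history,
                                       system_prompt="", max_length=4096,
                                       top_p=1.0, temperature=1.0):
        chatbot[-1] = (inputs, response)
        fake_update_ui(chatbot, history, msg="NewBing streaming")

predict_sketch("你好", chatbot=[], history=[])

Because the placeholder is appended first and then replaced in place, the same wording has to be kept consistent in both spots, which is why the commit updates both the append and the chatbot[-1] assignment.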