Integrate new models

This commit is contained in:
binary-husky
2023-10-28 19:23:43 +08:00
Parent commit: cf085565a7
Current commit: 127385b846
18 files changed, with 253 additions and 40 deletions


@@ -163,13 +163,13 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
         history_feedin.append([history[2*i], history[2*i+1]] )
 
     # 开始接收jittorllms的回复
-    response = "[Local Message]: 等待jittorllms响应中 ..."
+    response = "[Local Message] 等待jittorllms响应中 ..."
     for response in pangu_glm_handle.stream_chat(query=inputs, history=history_feedin, system_prompt=system_prompt, max_length=llm_kwargs['max_length'], top_p=llm_kwargs['top_p'], temperature=llm_kwargs['temperature']):
         chatbot[-1] = (inputs, response)
         yield from update_ui(chatbot=chatbot, history=history)
 
     # 总结输出
-    if response == "[Local Message]: 等待jittorllms响应中 ...":
-        response = "[Local Message]: jittorllms响应异常 ..."
+    if response == "[Local Message] 等待jittorllms响应中 ...":
+        response = "[Local Message] jittorllms响应异常 ..."
     history.extend([inputs, response])
     yield from update_ui(chatbot=chatbot, history=history)
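For context, the changed lines follow a sentinel pattern this predict function uses for streaming replies: response is seeded with a "waiting" message, overwritten by each partial chunk yielded from stream_chat, and checked afterwards so that an untouched sentinel signals that the model never answered. Below is a minimal runnable sketch of that pattern; DummyHandle, the simplified update_ui, and the English message strings are illustrative stand-ins for the project's real objects, not its actual API.

# Sketch of the sentinel-based streaming pattern shown in the diff above.
# DummyHandle and update_ui are hypothetical stand-ins, not the repo's API.

class DummyHandle:
    def stream_chat(self, query, history):
        # A real model handle yields progressively longer partial replies.
        for partial in ("Hel", "Hello", "Hello there!"):
            yield partial

def update_ui(chatbot, history):
    # Stand-in for the repo's update_ui; just yields the current state.
    yield chatbot, history

def predict_sketch(inputs, chatbot, history):
    handle = DummyHandle()
    # Seed with a sentinel; if streaming yields nothing, it survives untouched.
    response = "[Local Message] waiting for model response ..."
    for response in handle.stream_chat(query=inputs, history=history):
        chatbot[-1] = (inputs, response)  # show the latest partial reply
        yield from update_ui(chatbot=chatbot, history=history)
    # An unchanged sentinel means the stream produced no output at all.
    if response == "[Local Message] waiting for model response ...":
        response = "[Local Message] model response error ..."
    history.extend([inputs, response])
    yield from update_ui(chatbot=chatbot, history=history)

# Usage: chatbot starts with a placeholder row for the new exchange, e.g.
# for chat, hist in predict_sketch("hi", [("hi", "")], []): print(chat)

Note the design constraint this pattern imposes: the sentinel string assigned at the top must match the string compared at the bottom byte for byte. That is why this commit, which drops the colon after "[Local Message]", changes both the assignment and the comparison in the same hunk; updating only one site would silently break the failure detection.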