Mirrored from https://github.com/binary-husky/gpt_academic.git
Synced 2025-12-07 23:16:48 +00:00
Integrate new model
@@ -42,7 +42,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     yield from update_ui(chatbot=chatbot, history=history)

     if validate_key() is False:
-        yield from update_ui_lastest_msg(lastmsg="[Local Message]: 请配置讯飞星火大模型的XFYUN_APPID, XFYUN_API_KEY, XFYUN_API_SECRET", chatbot=chatbot, history=history, delay=0)
+        yield from update_ui_lastest_msg(lastmsg="[Local Message] 请配置讯飞星火大模型的XFYUN_APPID, XFYUN_API_KEY, XFYUN_API_SECRET", chatbot=chatbot, history=history, delay=0)
         return

     if additional_fn is not None:
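The guard above relies on validate_key(), which the diff references but does not show. Below is a minimal self-contained sketch of what such a check plausibly does, assuming the three iFlytek Spark credentials are read from environment variables; the real function in gpt_academic reads them through the project's own config helpers, and its exact logic may differ.

import os

def validate_key() -> bool:
    # Hedged sketch: reading os.environ is an assumption made to keep this
    # example self-contained; gpt_academic uses its own config machinery.
    required = ("XFYUN_APPID", "XFYUN_API_KEY", "XFYUN_API_SECRET")
    # All three iFlytek Spark credentials must be present and non-empty.
    return all(os.environ.get(name, "").strip() for name in required)

Returning False lets predict() surface the configuration hint shown in the hunk and bail out early with return.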
@@ -57,7 +57,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
         yield from update_ui(chatbot=chatbot, history=history)

     # 总结输出
-    if response == f"[Local Message]: 等待{model_name}响应中 ...":
-        response = f"[Local Message]: {model_name}响应异常 ..."
+    if response == f"[Local Message] 等待{model_name}响应中 ...":
+        response = f"[Local Message] {model_name}响应异常 ..."
     history.extend([inputs, response])
     yield from update_ui(chatbot=chatbot, history=history)
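Both hunks normalize the "[Local Message]" prefix by dropping the trailing colon. The second hunk matters because the fallback check works by exact string equality: the waiting placeholder produced earlier in the streaming loop (not shown in this excerpt) must match the compared string character for character, which is presumably why the prefix changes consistently across the commit. A hedged sketch of that pattern, with a hypothetical stream_chunks generator standing in for the real Spark streaming call:

def run_stream(model_name, stream_chunks):
    # Seed the chat with a waiting placeholder, then overwrite it as chunks
    # arrive. stream_chunks is a hypothetical stand-in for the model call.
    waiting = f"[Local Message] 等待{model_name}响应中 ..."
    response = waiting
    for chunk in stream_chunks():
        response = chunk if response == waiting else response + chunk
    # If the placeholder survived untouched, no chunk ever arrived. The
    # equality test only fires when both strings use the same prefix.
    if response == waiting:
        response = f"[Local Message] {model_name}响应异常 ..."
    return response

print(run_stream("sparkv2", lambda: iter([])))  # -> [Local Message] sparkv2响应异常 ...

Had only one side of the comparison kept the old "[Local Message]:" spelling, the error fallback would silently never trigger, so the commit updates the check and the error string together.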