Mirrored from https://github.com/binary-husky/gpt_academic.git
Last synced 2025-12-06 06:26:47 +00:00

Commit: Merge branch 'master' into frontier
In predict_no_ui_long_connection, the empty-chunk guard is extended to also skip SSE comment lines (those beginning with ':'):

@@ -170,7 +170,7 @@ def predict_no_ui_long_connection(inputs:str, llm_kwargs:dict, history:list=[],
         except requests.exceptions.ConnectionError:
            chunk = next(stream_response) # 失败了,重试一次?再失败就没办法了。
        chunk_decoded, chunkjson, has_choices, choice_valid, has_content, has_role = decode_chunk(chunk)
-       if len(chunk_decoded)==0: continue
+       if len(chunk_decoded)==0 or chunk_decoded.startswith(':'): continue
        if not chunk_decoded.startswith('data:'):
            error_msg = get_full_error(chunk, stream_response).decode()
            if "reduce the length" in error_msg:
In the same function, the dedicated 'OPENROUTER PROCESSING' special case is removed; those keep-alive lines are SSE comments and are now caught by the ':' guard above:

@@ -181,9 +181,6 @@ def predict_no_ui_long_connection(inputs:str, llm_kwargs:dict, history:list=[],
                raise RuntimeError("OpenAI拒绝了请求:" + error_msg)
        if ('data: [DONE]' in chunk_decoded): break # api2d 正常完成
        # 提前读取一些信息 (用于判断异常)
-       if (has_choices and not choice_valid) or ('OPENROUTER PROCESSING' in chunk_decoded):
-           # 一些垃圾第三方接口的出现这样的错误,openrouter的特殊处理
-           continue
        json_data = chunkjson['choices'][0]
        delta = json_data["delta"]
        if len(delta) == 0: break
The streaming predict function gets the same substitution: the substring test for 'OPENROUTER PROCESSING' is replaced by the generic SSE comment check, and the now-redundant explanatory comment is dropped:

@@ -328,8 +325,7 @@ def predict(inputs:str, llm_kwargs:dict, plugin_kwargs:dict, chatbot:ChatBotWith

        if chunk:
            try:
-               if (has_choices and not choice_valid) or ('OPENROUTER PROCESSING' in chunk_decoded):
-                   # 一些垃圾第三方接口的出现这样的错误, 或者OPENROUTER的特殊处理,因为OPENROUTER的数据流未连接到模型时会出现OPENROUTER PROCESSING
+               if (has_choices and not choice_valid) or chunk_decoded.startswith(':'):
                    continue
                if ('data: [DONE]' not in chunk_decoded) and len(chunk_decoded) > 0 and (chunkjson is None):
                    # 传递进来一些奇怪的东西
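For background: in the server-sent events (SSE) protocol, any line whose first character is ':' is a comment carrying no payload, and OpenRouter emits ': OPENROUTER PROCESSING' as such a keep-alive while the upstream model is still connecting, so startswith(':') subsumes the old substring test. A minimal sketch of the resulting filter (iter_sse_payloads is a hypothetical helper for illustration, not the repo's decode_chunk):

import json

def iter_sse_payloads(lines):
    # Yield parsed JSON payloads from decoded SSE lines, skipping noise.
    for line in lines:
        line = line.strip()
        if len(line) == 0 or line.startswith(':'):
            continue  # blank line or SSE comment / keep-alive (e.g. OpenRouter)
        if 'data: [DONE]' in line:
            break     # normal end-of-stream sentinel
        if line.startswith('data:'):
            yield json.loads(line[len('data:'):])

demo = [
    ': OPENROUTER PROCESSING',   # keep-alive comment, silently dropped
    'data: {"choices": [{"delta": {"content": "hi"}}]}',
    'data: [DONE]',
]
for payload in iter_sse_payloads(demo):
    print(payload['choices'][0]['delta']['content'])  # prints: hi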
In GoogleChatInit, stop sequences for the Gemini payload are now sanitized: the 'stop' value is still split on spaces, but empty strings are filtered out, and the stopSequences field is included only when at least one sequence remains:

@@ -202,16 +202,29 @@ class GoogleChatInit:
        ) # 处理 history

        messages.append(self.__conversation_user(inputs, llm_kwargs, enable_multimodal_capacity)) # 处理用户对话
-       payload = {
-           "contents": messages,
-           "generationConfig": {
-               # "maxOutputTokens": llm_kwargs.get("max_token", 1024),
-               "stopSequences": str(llm_kwargs.get("stop", "")).split(" "),
-               "temperature": llm_kwargs.get("temperature", 1),
-               "topP": llm_kwargs.get("top_p", 0.8),
-               "topK": 10,
-           },
-       }
+       stop_sequences = str(llm_kwargs.get("stop", "")).split(" ")
+       # 过滤空字符串并确保至少有一个停止序列
+       stop_sequences = [s for s in stop_sequences if s]
+       if not stop_sequences:
+           payload = {
+               "contents": messages,
+               "generationConfig": {
+                   "temperature": llm_kwargs.get("temperature", 1),
+                   "topP": llm_kwargs.get("top_p", 0.8),
+                   "topK": 10,
+               },
+           }
+       else:
+           payload = {
+               "contents": messages,
+               "generationConfig": {
+                   # "maxOutputTokens": llm_kwargs.get("max_token", 1024),
+                   "stopSequences": stop_sequences,
+                   "temperature": llm_kwargs.get("temperature", 1),
+                   "topP": llm_kwargs.get("top_p", 0.8),
+                   "topK": 10,
+               },
+           }

        return header, payload
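The reason for the branch: in Python, "".split(" ") returns [""], so the old code always sent a list containing a lone empty string as a stop sequence whenever no stop word was configured. A hedged sketch of the new behavior in isolation (build_generation_config is a hypothetical helper, not the repo's code):

def build_generation_config(llm_kwargs: dict) -> dict:
    # Filter out empty strings produced by splitting an empty "stop" value.
    stop_sequences = [s for s in str(llm_kwargs.get("stop", "")).split(" ") if s]
    config = {
        "temperature": llm_kwargs.get("temperature", 1),
        "topP": llm_kwargs.get("top_p", 0.8),
        "topK": 10,
    }
    if stop_sequences:
        # Only attach the key when there is at least one non-empty sequence.
        config["stopSequences"] = stop_sequences
    return config

assert "stopSequences" not in build_generation_config({"stop": ""})
assert build_generation_config({"stop": "END"})["stopSequences"] == ["END"]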
On the front-end side, a clear_conversation helper is added: it updates the stored conversation metadata and clicks the stop button (elem_stop) to cancel any in-flight generation before delegating to the existing reset_conversation:

@@ -1070,6 +1070,14 @@ function restore_chat_from_local_storage(event) {
 }


+function clear_conversation(a, b, c) {
+    update_conversation_metadata();
+    let stopButton = document.getElementById("elem_stop");
+    stopButton.click();
+    // console.log("clear_conversation");
+    return reset_conversation(a, b);
+}
+

 function reset_conversation(a, b) {
     // console.log("js_code_reset");