Merge branch 'master' into frontier

This commit is contained in:
binary-husky
2024-12-29 00:08:59 +08:00
Current commit ca1ab57f5d
3 changed files with 33 insertions and 16 deletions

View file

@@ -170,7 +170,7 @@ def predict_no_ui_long_connection(inputs:str, llm_kwargs:dict, history:list=[],
         except requests.exceptions.ConnectionError:
             chunk = next(stream_response) # Failed; retry once. If it fails again, there is nothing more we can do.
         chunk_decoded, chunkjson, has_choices, choice_valid, has_content, has_role = decode_chunk(chunk)
-        if len(chunk_decoded)==0: continue
+        if len(chunk_decoded)==0 or chunk_decoded.startswith(':'): continue
         if not chunk_decoded.startswith('data:'):
             error_msg = get_full_error(chunk, stream_response).decode()
             if "reduce the length" in error_msg:
@@ -181,9 +181,6 @@ def predict_no_ui_long_connection(inputs:str, llm_kwargs:dict, history:list=[],
                 raise RuntimeError("OpenAI rejected the request: " + error_msg)
         if ('data: [DONE]' in chunk_decoded): break # api2d finished normally
         # Read some information early (used to detect anomalies)
-        if (has_choices and not choice_valid) or ('OPENROUTER PROCESSING' in chunk_decoded):
-            # Some flaky third-party endpoints produce this error; special handling for openrouter
-            continue
         json_data = chunkjson['choices'][0]
         delta = json_data["delta"]
         if len(delta) == 0: break
@@ -328,8 +325,7 @@ def predict(inputs:str, llm_kwargs:dict, plugin_kwargs:dict, chatbot:ChatBotWith
                 if chunk:
                     try:
-                        if (has_choices and not choice_valid) or ('OPENROUTER PROCESSING' in chunk_decoded):
-                            # Some flaky third-party endpoints produce this error; also OPENROUTER's special case, since its stream emits OPENROUTER PROCESSING before it is connected to the model
+                        if (has_choices and not choice_valid) or chunk_decoded.startswith(':'):
                             continue
                         if ('data: [DONE]' not in chunk_decoded) and len(chunk_decoded) > 0 and (chunkjson is None):
                             # Something strange was passed in
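
For context: the removed `'OPENROUTER PROCESSING'` match and the new `startswith(':')` check cover the same case, because Server-Sent Events defines any line beginning with a colon as a comment, and OpenRouter sends `: OPENROUTER PROCESSING` keep-alive comments before the model starts streaming. A minimal sketch of that filtering, assuming an OpenAI-style SSE endpoint (`iter_sse_data` and its parameters are illustrative, not helpers from this repo):

    import requests

    def iter_sse_data(url: str, headers: dict, payload: dict):
        """Yield only 'data:' payloads from an SSE stream, skipping blank
        keep-alives and comment lines such as ': OPENROUTER PROCESSING'."""
        resp = requests.post(url, headers=headers, json=payload, stream=True)
        for raw in resp.iter_lines():
            line = raw.decode("utf-8", errors="ignore").strip()
            if not line or line.startswith(":"):
                continue  # SSE comment or keep-alive line, never JSON
            if line.startswith("data:"):
                data = line[len("data:"):].strip()
                if data == "[DONE]":
                    break  # end-of-stream sentinel used by OpenAI-style APIs
                yield data

Filtering on the `:` prefix covers every SSE comment, so the code no longer needs to match the `OPENROUTER PROCESSING` string explicitly.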

View file

@@ -202,11 +202,24 @@ class GoogleChatInit:
         ) # handle history
         messages.append(self.__conversation_user(inputs, llm_kwargs, enable_multimodal_capacity)) # handle the user's message
+        stop_sequences = str(llm_kwargs.get("stop", "")).split(" ")
+        # Filter out empty strings and make sure at least one stop sequence remains
+        stop_sequences = [s for s in stop_sequences if s]
+        if not stop_sequences:
+            payload = {
+                "contents": messages,
+                "generationConfig": {
+                    "temperature": llm_kwargs.get("temperature", 1),
+                    "topP": llm_kwargs.get("top_p", 0.8),
+                    "topK": 10,
+                },
+            }
+        else:
             payload = {
                 "contents": messages,
                 "generationConfig": {
                     # "maxOutputTokens": llm_kwargs.get("max_token", 1024),
-                    "stopSequences": str(llm_kwargs.get("stop", "")).split(" "),
+                    "stopSequences": stop_sequences,
                     "temperature": llm_kwargs.get("temperature", 1),
                     "topP": llm_kwargs.get("top_p", 0.8),
                     "topK": 10,

View file

@@ -1070,6 +1070,14 @@ function restore_chat_from_local_storage(event) {
 }
 function clear_conversation(a, b, c) {
     update_conversation_metadata();
+    let stopButton = document.getElementById("elem_stop");
+    stopButton.click();
     // console.log("clear_conversation");
     return reset_conversation(a, b);
 }
 function reset_conversation(a, b) {
     // console.log("js_code_reset");