Fix RuntimeError in predict_no_ui_long_connection() (#2095)

Bug fix: Fix RuntimeError in predict_no_ui_long_connection()

In the original code, calling predict_no_ui_long_connection() would trigger a RuntimeError("OpenAI拒绝了请求:" + error_msg) even when the server responded normally. The issue occurred due to incorrect handling of SSE protocol comment lines (lines starting with ":"). 

Modified the parsing logic in both `predict` and `predict_no_ui_long_connection` to handle these lines correctly, making the logic more intuitive and robust.
This commit is contained in:
whyXVI
2024-12-28 23:21:14 +08:00
Committed by GitHub
Parent c53ddf65aa
Commit 09a82a572d

View file
@@ -170,7 +170,7 @@ def predict_no_ui_long_connection(inputs:str, llm_kwargs:dict, history:list=[],
 except requests.exceptions.ConnectionError:
 chunk = next(stream_response) # 失败了,重试一次?再失败就没办法了。
 chunk_decoded, chunkjson, has_choices, choice_valid, has_content, has_role = decode_chunk(chunk)
-if len(chunk_decoded)==0: continue
+if len(chunk_decoded)==0 or chunk_decoded.startswith(':'): continue
 if not chunk_decoded.startswith('data:'):
 error_msg = get_full_error(chunk, stream_response).decode()
 if "reduce the length" in error_msg:
@@ -181,9 +181,6 @@ def predict_no_ui_long_connection(inputs:str, llm_kwargs:dict, history:list=[],
 raise RuntimeError("OpenAI拒绝了请求" + error_msg)
 if ('data: [DONE]' in chunk_decoded): break # api2d 正常完成
 # 提前读取一些信息 (用于判断异常)
-if (has_choices and not choice_valid) or ('OPENROUTER PROCESSING' in chunk_decoded):
-    # 一些垃圾第三方接口的出现这样的错误,openrouter的特殊处理
-    continue
 json_data = chunkjson['choices'][0]
 delta = json_data["delta"]
 if len(delta) == 0: break
@@ -328,8 +325,7 @@ def predict(inputs:str, llm_kwargs:dict, plugin_kwargs:dict, chatbot:ChatBotWith
 if chunk:
 try:
-if (has_choices and not choice_valid) or ('OPENROUTER PROCESSING' in chunk_decoded):
-# 一些垃圾第三方接口的出现这样的错误, 或者OPENROUTER的特殊处理,因为OPENROUTER的数据流未连接到模型时会出现OPENROUTER PROCESSING
+if (has_choices and not choice_valid) or chunk_decoded.startswith(':'):
 continue
 if ('data: [DONE]' not in chunk_decoded) and len(chunk_decoded) > 0 and (chunkjson is None):
 # 传递进来一些奇怪的东西