fix temp issue of o1

这个提交包含在:
binary-husky
2024-12-25 00:54:03 +08:00
父节点 1dd1d0ed6c
当前提交 97a81e9388
共有 2 个文件被更改,包括 8 次插入2 次删除

查看文件

@@ -273,6 +273,7 @@ model_info = {
         "token_cnt": get_token_num_gpt4,
         "openai_disable_system_prompt": True,
         "openai_disable_stream": True,
+        "openai_force_temperature_one": True,
     },
     "o1-mini": {
@@ -284,6 +285,7 @@ model_info = {
         "token_cnt": get_token_num_gpt4,
         "openai_disable_system_prompt": True,
         "openai_disable_stream": True,
+        "openai_force_temperature_one": True,
     },
     "o1-2024-12-17": {
@@ -295,6 +297,7 @@ model_info = {
         "token_cnt": get_token_num_gpt4,
         "openai_disable_system_prompt": True,
         "openai_disable_stream": True,
+        "openai_force_temperature_one": True,
     },
     "o1": {
@@ -306,6 +309,7 @@ model_info = {
         "token_cnt": get_token_num_gpt4,
         "openai_disable_system_prompt": True,
         "openai_disable_stream": True,
+        "openai_force_temperature_one": True,
     },
     "gpt-4-turbo": {

查看文件

@@ -351,7 +351,7 @@ def predict(inputs:str, llm_kwargs:dict, plugin_kwargs:dict, chatbot:ChatBotWith
             raise ValueError(f'无法读取以下数据,请检查配置。\n\n{chunk_decoded}')
         # 前者是API2D & One-API的结束条件,后者是OPENAI的结束条件
         one_api_terminate = ('data: [DONE]' in chunk_decoded)
-        openai_terminate = (len(chunkjson['choices'][0]["delta"]) == 0)
+        openai_terminate = (has_choices) and (len(chunkjson['choices'][0]["delta"]) == 0)
         if one_api_terminate or openai_terminate:
             is_termination_certain = False
             if one_api_terminate: is_termination_certain = True # 抓取符合规范的结束条件
@@ -563,6 +563,8 @@ def generate_payload(inputs:str, llm_kwargs:dict, history:list, system_prompt:st
         "n": 1,
         "stream": stream,
     }
+    openai_force_temperature_one = model_info[llm_kwargs['llm_model']].get('openai_force_temperature_one', False)
+    if openai_force_temperature_one:
+        payload.pop('temperature')
     return headers,payload