Fix Gemini conversation error when there are zero stop words (#2092)

Southlandi
2024-12-28 23:22:10 +08:00
Committed by GitHub
Parent 09a82a572d
Current commit fd93622840

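Why the error happens: when no stop words are configured, `str(llm_kwargs.get("stop", "")).split(" ")` returns `[""]`, so the request body carries a stopSequences list whose only element is an empty string, which the Gemini API then rejects (the conversation error this commit fixes). Below is a minimal sketch of the failure and of the guard the patch adds; the llm_kwargs values are illustrative and not taken from the repository.

# Minimal sketch of the bug and the fix (illustrative values, not from the commit)
llm_kwargs = {"stop": "", "temperature": 1, "top_p": 0.8}

raw = str(llm_kwargs.get("stop", "")).split(" ")
print(raw)             # [''] -- splitting an empty string still yields one (empty) element

stop_sequences = [s for s in raw if s]
print(stop_sequences)  # []  -- after filtering out empty strings, nothing is left

generation_config = {
    "temperature": llm_kwargs.get("temperature", 1),
    "topP": llm_kwargs.get("top_p", 0.8),
    "topK": 10,
}
# Only attach stopSequences when at least one non-empty entry remains,
# which is what the if/else branches in the patch below achieve.
if stop_sequences:
    generation_config["stopSequences"] = stop_sequences

payload = {"contents": [], "generationConfig": generation_config}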

@@ -202,11 +202,24 @@ class GoogleChatInit:
             ) # handle history
         messages.append(self.__conversation_user(inputs, llm_kwargs, enable_multimodal_capacity)) # handle the user message
-        payload = {
-            "contents": messages,
-            "generationConfig": {
-                # "maxOutputTokens": llm_kwargs.get("max_token", 1024),
-                "stopSequences": str(llm_kwargs.get("stop", "")).split(" "),
-                "temperature": llm_kwargs.get("temperature", 1),
-                "topP": llm_kwargs.get("top_p", 0.8),
-                "topK": 10,
+        stop_sequences = str(llm_kwargs.get("stop", "")).split(" ")
+        # filter out empty strings to ensure at least one valid stop sequence
+        stop_sequences = [s for s in stop_sequences if s]
+        if not stop_sequences:
+            payload = {
+                "contents": messages,
+                "generationConfig": {
+                    "temperature": llm_kwargs.get("temperature", 1),
+                    "topP": llm_kwargs.get("top_p", 0.8),
+                    "topK": 10,
+                },
+            }
+        else:
+            payload = {
+                "contents": messages,
+                "generationConfig": {
+                    # "maxOutputTokens": llm_kwargs.get("max_token", 1024),
+                    "stopSequences": stop_sequences,
+                    "temperature": llm_kwargs.get("temperature", 1),
+                    "topP": llm_kwargs.get("top_p", 0.8),
+                    "topK": 10,