Mirrored from
https://github.com/binary-husky/gpt_academic.git
Synced 2025-12-08 07:26:48 +00:00
Add support for Volcano Engine online LLMs (#2165)
* use oai adaptive bridge function to handle vol engine
* add vol engine deepseek v3

---------

Co-authored-by: binary-husky <qingxu.fu@outlook.com>
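For context, "use oai adaptive bridge function" means the Volcano Engine models are driven through an OpenAI-compatible chat-completions interface rather than a bespoke client, which is why a single bridge can cover both the generic "volcengine" alias and concrete ep-... inference endpoint IDs like the one tested in this hunk. Below is a minimal sketch of that idea using the official openai Python client; the base URL, environment variable, and endpoint ID are illustrative assumptions, not values taken from this commit:

# Sketch only: calling a Volcano Engine (Ark) endpoint through the
# OpenAI-compatible API. base_url, ARK_API_KEY and the ep-... ID are
# assumptions for illustration, not this project's actual wiring.
import os
from openai import OpenAI

client = OpenAI(
    api_key=os.environ["ARK_API_KEY"],                    # hypothetical env var
    base_url="https://ark.cn-beijing.volces.com/api/v3",  # assumed Ark base URL
)
resp = client.chat.completions.create(
    model="ep-xxxxxxxxxxxxxx-xxxxx",  # your inference endpoint ID
    messages=[{"role": "user", "content": "请问什么是质子?"}],
    temperature=1,
    top_p=1,
)
print(resp.choices[0].message.content)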
@@ -11,46 +11,65 @@ def validate_path():
validate_path()  # validate path so you can run from base directory

if __name__ == "__main__":
    # from request_llms.bridge_taichu import predict_no_ui_long_connection
    from request_llms.bridge_volcengine import predict_no_ui_long_connection
    # from request_llms.bridge_cohere import predict_no_ui_long_connection
    # from request_llms.bridge_spark import predict_no_ui_long_connection
    # from request_llms.bridge_zhipu import predict_no_ui_long_connection
    # from request_llms.bridge_chatglm3 import predict_no_ui_long_connection
    llm_kwargs = {
        "llm_model": "volcengine",
        "max_length": 4096,
        "top_p": 1,
        "temperature": 1,
    }

if "在线模型":
    if __name__ == "__main__":
        from request_llms.bridge_taichu import predict_no_ui_long_connection
        # from request_llms.bridge_cohere import predict_no_ui_long_connection
        # from request_llms.bridge_spark import predict_no_ui_long_connection
        # from request_llms.bridge_zhipu import predict_no_ui_long_connection
        # from request_llms.bridge_chatglm3 import predict_no_ui_long_connection
        llm_kwargs = {
            "llm_model": "taichu",
            "max_length": 4096,
            "top_p": 1,
            "temperature": 1,
        }
        result = predict_no_ui_long_connection(
            inputs="请问什么是质子?", llm_kwargs=llm_kwargs, history=["你好", "我好!"], sys_prompt="系统"
        )
        print("final result:", result)

# if "在线模型":
#     if __name__ == "__main__":
#         # from request_llms.bridge_taichu import predict_no_ui_long_connection
#         from request_llms.bridge_volcengine import predict_no_ui_long_connection
#         # from request_llms.bridge_cohere import predict_no_ui_long_connection
#         # from request_llms.bridge_spark import predict_no_ui_long_connection
#         # from request_llms.bridge_zhipu import predict_no_ui_long_connection
#         # from request_llms.bridge_chatglm3 import predict_no_ui_long_connection
#         llm_kwargs = {
#             "llm_model": "ep-20250222011816-5cq8z",
#             "max_length": 4096,
#             "top_p": 1,
#             "temperature": 1,
#         }

    result = predict_no_ui_long_connection(
        inputs="请问什么是质子?", llm_kwargs=llm_kwargs, history=["你好", "我好!"], sys_prompt="系统"
    )
    print("final result:", result)

    # result = predict_no_ui_long_connection(
    #     inputs="请问什么是质子?", llm_kwargs=llm_kwargs, history=["你好", "我好!"], sys_prompt="系统"
    # )
    # print("final result:", result)


if "本地模型":
    if __name__ == "__main__":
        # from request_llms.bridge_newbingfree import predict_no_ui_long_connection
        # from request_llms.bridge_moss import predict_no_ui_long_connection
        # from request_llms.bridge_jittorllms_pangualpha import predict_no_ui_long_connection
        # from request_llms.bridge_jittorllms_llama import predict_no_ui_long_connection
        # from request_llms.bridge_claude import predict_no_ui_long_connection
        # from request_llms.bridge_internlm import predict_no_ui_long_connection
        # from request_llms.bridge_deepseekcoder import predict_no_ui_long_connection
        # from request_llms.bridge_qwen_7B import predict_no_ui_long_connection
        # from request_llms.bridge_qwen_local import predict_no_ui_long_connection
        llm_kwargs = {
            "max_length": 4096,
            "top_p": 1,
            "temperature": 1,
        }
        result = predict_no_ui_long_connection(
            inputs="请问什么是质子?", llm_kwargs=llm_kwargs, history=["你好", "我好!"], sys_prompt=""
        )
        print("final result:", result)

# if "本地模型":
#     if __name__ == "__main__":
#         # from request_llms.bridge_newbingfree import predict_no_ui_long_connection
#         # from request_llms.bridge_moss import predict_no_ui_long_connection
#         # from request_llms.bridge_jittorllms_pangualpha import predict_no_ui_long_connection
#         # from request_llms.bridge_jittorllms_llama import predict_no_ui_long_connection
#         # from request_llms.bridge_claude import predict_no_ui_long_connection
#         # from request_llms.bridge_internlm import predict_no_ui_long_connection
#         # from request_llms.bridge_deepseekcoder import predict_no_ui_long_connection
#         # from request_llms.bridge_qwen_7B import predict_no_ui_long_connection
#         # from request_llms.bridge_qwen_local import predict_no_ui_long_connection
#         llm_kwargs = {
#             "max_length": 4096,
#             "top_p": 1,
#             "temperature": 1,
#         }
#         result = predict_no_ui_long_connection(
#             inputs="请问什么是质子?", llm_kwargs=llm_kwargs, history=["你好", "我好!"], sys_prompt=""
#         )
#         print("final result:", result)
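One reading note on the hunk above: `if "在线模型":` ("online models") and `if "本地模型":` ("local models") are a toggle idiom. A non-empty string literal is always truthy, so the guarded block always runs, and a whole section can be switched off by emptying the string instead of re-indenting or mass-commenting its body. A self-contained illustration:

# Truthy-string section toggles, as used in the test file above.
if "online models":  # non-empty string -> True: section enabled
    print("this section runs")

if "":               # empty string -> False: section disabled
    print("this section is skipped")

Running the file directly (each section is additionally guarded by `if __name__ == "__main__":`) exercises whichever bridge import is left uncommented.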