文件
gpt_academic/tests/test_llms.py
littleolaf 72dbe856d2 添加接入 火山引擎在线大模型 内容的支持 (#2165)
* use oai adaptive bridge function to handle vol engine

* add vol engine deepseek v3

---------

Co-authored-by: binary-husky <qingxu.fu@outlook.com>
2025-03-04 23:58:03 +08:00

76 行
3.3 KiB
Python

# """
# Unit tests for the individual LLM bridge models (对各个llm模型进行单元测试)
# """
def validate_path():
    """Make the repository root importable and the current working directory.

    Assumes this file lives one level below the project root (tests/).
    Side effects: changes the process CWD to the root and appends the root
    to ``sys.path`` so ``request_llms.*`` modules resolve when the test is
    run from the base directory.
    """
    import os, sys
    # Resolve the parent directory of this file as the assumed project root.
    root_dir_assume = os.path.abspath(os.path.dirname(__file__) + "/..")
    os.chdir(root_dir_assume)
    sys.path.append(root_dir_assume)
validate_path()  # validate path so you can run from base directory
if __name__ == "__main__":
    # Pick exactly one bridge to smoke-test; the others stay commented out.
    # from request_llms.bridge_taichu import predict_no_ui_long_connection
    from request_llms.bridge_volcengine import predict_no_ui_long_connection
    # from request_llms.bridge_cohere import predict_no_ui_long_connection
    # from request_llms.bridge_spark import predict_no_ui_long_connection
    # from request_llms.bridge_zhipu import predict_no_ui_long_connection
    # from request_llms.bridge_chatglm3 import predict_no_ui_long_connection

    # Minimal generation parameters for the VolcEngine online model.
    llm_kwargs = {
        "llm_model": "volcengine",
        "max_length": 4096,
        "top_p": 1,
        "temperature": 1,
    }

    # Fire a single blocking request with a short fake history; this hits
    # the live service, so it is a manual smoke test rather than a unit test.
    result = predict_no_ui_long_connection(
        inputs="请问什么是质子?", llm_kwargs=llm_kwargs, history=["你好", "我好!"], sys_prompt="系统"
    )
    # Fixed: the result was printed twice (duplicated line).
    print("final result:", result)
# if "在线模型":
# if __name__ == "__main__":
# # from request_llms.bridge_taichu import predict_no_ui_long_connection
# from request_llms.bridge_volcengine import predict_no_ui_long_connection
# # from request_llms.bridge_cohere import predict_no_ui_long_connection
# # from request_llms.bridge_spark import predict_no_ui_long_connection
# # from request_llms.bridge_zhipu import predict_no_ui_long_connection
# # from request_llms.bridge_chatglm3 import predict_no_ui_long_connection
# llm_kwargs = {
# "llm_model": "ep-20250222011816-5cq8z",
# "max_length": 4096,
# "top_p": 1,
# "temperature": 1,
# }
# result = predict_no_ui_long_connection(
# inputs="请问什么是质子?", llm_kwargs=llm_kwargs, history=["你好", "我好!"], sys_prompt="系统"
# )
# print("final result:", result)
# print("final result:", result)
# if "本地模型":
# if __name__ == "__main__":
# # from request_llms.bridge_newbingfree import predict_no_ui_long_connection
# # from request_llms.bridge_moss import predict_no_ui_long_connection
# # from request_llms.bridge_jittorllms_pangualpha import predict_no_ui_long_connection
# # from request_llms.bridge_jittorllms_llama import predict_no_ui_long_connection
# # from request_llms.bridge_claude import predict_no_ui_long_connection
# # from request_llms.bridge_internlm import predict_no_ui_long_connection
# # from request_llms.bridge_deepseekcoder import predict_no_ui_long_connection
# # from request_llms.bridge_qwen_7B import predict_no_ui_long_connection
# # from request_llms.bridge_qwen_local import predict_no_ui_long_connection
# llm_kwargs = {
# "max_length": 4096,
# "top_p": 1,
# "temperature": 1,
# }
# result = predict_no_ui_long_connection(
# inputs="请问什么是质子?", llm_kwargs=llm_kwargs, history=["你好", "我好!"], sys_prompt=""
# )
# print("final result:", result)