From bff87ada924011385096862cbffee1c1bfdd3d52 Mon Sep 17 00:00:00 2001
From: QiyuanChen <72334646+qychen2001@users.noreply.github.com>
Date: Fri, 24 May 2024 00:16:26 +0800
Subject: [PATCH] Add support for the ERNIE-Speed and ERNIE-Lite models (#1821)
MIME-Version: 1.0
Content-Type: text/plain; charset=UTF-8
Content-Transfer-Encoding: 8bit

* feat: add ERNIE-Speed and ERNIE-Lite

  Baidu's ERNIE-Speed and ERNIE-Lite models are now free to use, so their endpoint URLs have been added. They can be accessed as ERNIE-Speed-128K, ERNIE-Speed-8K, or ERNIE-Lite-8K.

* chore: Modify supported models in config.py

  Updated the list of Qianfan-supported models in config.py, adding the three free models.
---
 config.py                      | 2 +-
 request_llms/bridge_qianfan.py | 5 ++++-
 2 files changed, 5 insertions(+), 2 deletions(-)

diff --git a/config.py b/config.py
index ad206d02..d00b3563 100644
--- a/config.py
+++ b/config.py
@@ -134,7 +134,7 @@ DASHSCOPE_API_KEY = "" # 阿里灵积云API_KEY
 # 百度千帆(LLM_MODEL="qianfan")
 BAIDU_CLOUD_API_KEY = ''
 BAIDU_CLOUD_SECRET_KEY = ''
-BAIDU_CLOUD_QIANFAN_MODEL = 'ERNIE-Bot' # 可选 "ERNIE-Bot-4"(文心大模型4.0), "ERNIE-Bot"(文心一言), "ERNIE-Bot-turbo", "BLOOMZ-7B", "Llama-2-70B-Chat", "Llama-2-13B-Chat", "Llama-2-7B-Chat"
+BAIDU_CLOUD_QIANFAN_MODEL = 'ERNIE-Bot' # 可选 "ERNIE-Bot-4"(文心大模型4.0), "ERNIE-Bot"(文心一言), "ERNIE-Bot-turbo", "BLOOMZ-7B", "Llama-2-70B-Chat", "Llama-2-13B-Chat", "Llama-2-7B-Chat", "ERNIE-Speed-128K", "ERNIE-Speed-8K", "ERNIE-Lite-8K"
 
 
 # 如果使用ChatGLM2微调模型,请把 LLM_MODEL="chatglmft",并在此处指定模型路径
diff --git a/request_llms/bridge_qianfan.py b/request_llms/bridge_qianfan.py
index 76cea3c2..a65adb3a 100644
--- a/request_llms/bridge_qianfan.py
+++ b/request_llms/bridge_qianfan.py
@@ -82,6 +82,9 @@ def generate_from_baidu_qianfan(inputs, llm_kwargs, history, system_prompt):
         "ERNIE-Bot": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/completions",
         "ERNIE-Bot-turbo": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/eb-instant",
         "BLOOMZ-7B": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/bloomz_7b1",
+        "ERNIE-Speed-128K": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-speed-128k",
+        "ERNIE-Speed-8K": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie_speed",
+        "ERNIE-Lite-8K": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-lite-8k",
         "Llama-2-70B-Chat": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/llama_2_70b",
         "Llama-2-13B-Chat": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/llama_2_13b",
@@ -165,4 +168,4 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
         tb_str = '```\n' + trimmed_format_exc() + '```'
         chatbot[-1] = (chatbot[-1][0], tb_str)
         yield from update_ui(chatbot=chatbot, history=history, msg="异常") # 刷新界面
-        return
\ No newline at end of file
+        return
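
For reference, below is a minimal standalone sketch (not part of this patch or of bridge_qianfan.py) showing how one of the newly added endpoints could be called directly. It assumes Baidu's standard OAuth token exchange at https://aip.baidubce.com/oauth/2.0/token and the usual Qianfan chat-completions request shape; the helper names (get_access_token, chat_once) and the example prompt are illustrative only.

import requests

# Credentials correspond to the fields added/used in config.py.
BAIDU_CLOUD_API_KEY = "<your-api-key>"
BAIDU_CLOUD_SECRET_KEY = "<your-secret-key>"

# Endpoint URLs introduced by this patch, keyed by BAIDU_CLOUD_QIANFAN_MODEL.
QIANFAN_URLS = {
    "ERNIE-Speed-128K": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-speed-128k",
    "ERNIE-Speed-8K": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie_speed",
    "ERNIE-Lite-8K": "https://aip.baidubce.com/rpc/2.0/ai_custom/v1/wenxinworkshop/chat/ernie-lite-8k",
}

def get_access_token() -> str:
    """Exchange the API key / secret key for a short-lived access token."""
    resp = requests.post(
        "https://aip.baidubce.com/oauth/2.0/token",
        params={
            "grant_type": "client_credentials",
            "client_id": BAIDU_CLOUD_API_KEY,
            "client_secret": BAIDU_CLOUD_SECRET_KEY,
        },
    )
    resp.raise_for_status()
    return resp.json()["access_token"]

def chat_once(model: str, prompt: str) -> str:
    """Send a single user message to the chosen free model and return its reply."""
    url = QIANFAN_URLS[model]
    payload = {"messages": [{"role": "user", "content": prompt}]}
    resp = requests.post(url, params={"access_token": get_access_token()}, json=payload)
    resp.raise_for_status()
    return resp.json().get("result", "")

if __name__ == "__main__":
    # Example usage: query the 128K-context ERNIE-Speed model.
    print(chat_once("ERNIE-Speed-128K", "你好"))

Within the project itself, selecting one of the new models is just a matter of setting BAIDU_CLOUD_QIANFAN_MODEL in config.py to "ERNIE-Speed-128K", "ERNIE-Speed-8K", or "ERNIE-Lite-8K"; the bridge then resolves the URL from the table added in this patch.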