Mirrored from https://github.com/binary-husky/gpt_academic.git
Synced 2025-12-06 14:36:48 +00:00

Update GithubAction+ChatGLM+Moss
@@ -21,16 +21,12 @@ RUN python3 -m pip install -r request_llm/requirements_moss.txt
 RUN python3 -m pip install -r request_llm/requirements_chatglm.txt
 RUN python3 -m pip install -r request_llm/requirements_newbing.txt
 
-# 预热CHATGLM参数(非必要 可选步骤)
-RUN echo ' \n\
-from transformers import AutoModel, AutoTokenizer \n\
-chatglm_tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True) \n\
-chatglm_model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).float() ' >> warm_up_chatglm.py
-RUN python3 -u warm_up_chatglm.py
-
-# 禁用缓存,确保更新代码
-ADD "https://www.random.org/cgi-bin/randbyte?nbytes=10&format=h" skipcache
-RUN git pull
+# # 预热CHATGLM参数(非必要 可选步骤)
+# RUN echo ' \n\
+# from transformers import AutoModel, AutoTokenizer \n\
+# chatglm_tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True) \n\
+# chatglm_model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).float() ' >> warm_up_chatglm.py
+# RUN python3 -u warm_up_chatglm.py
 
 # 预热Tiktoken模块
 RUN python3 -c 'from check_proxy import warm_up_modules; warm_up_modules()'
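In short, this change comments out the optional ChatGLM warm-up block (预热CHATGLM参数, i.e. pre-downloading the ChatGLM-6B weights at image-build time) and removes the cache-busting pair of lines (the ADD of random bytes followed by RUN git pull) that forced a fresh git pull on every build; the Tiktoken warm-up step is kept. For reference, the commented-out echo lines used to write the following script into the image; this is a minimal reconstruction of exactly what appears in the diff above, and running it downloads the THUDM/chatglm-6b weights from Hugging Face:

# warm_up_chatglm.py -- content equivalent to the commented-out `RUN echo ... >> warm_up_chatglm.py` block
from transformers import AutoModel, AutoTokenizer

# Instantiating the tokenizer and model pulls and caches the ChatGLM-6B weights
# inside the Docker layer, so the first runtime request does not have to download them.
chatglm_tokenizer = AutoTokenizer.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True)
chatglm_model = AutoModel.from_pretrained("THUDM/chatglm-6b", trust_remote_code=True).float()

The retained Tiktoken warm-up line calls warm_up_modules() from the project's check_proxy module; its implementation is not shown in this diff, but a generic tiktoken warm-up (an illustrative sketch, not necessarily what warm_up_modules actually does) amounts to:

import tiktoken

# The first get_encoding() call downloads and caches the BPE rank files,
# so they end up baked into the image rather than fetched at container start.
enc = tiktoken.get_encoding("cl100k_base")
enc.encode("warm up")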