new version

This commit is contained in:
binary-husky
2023-10-06 12:00:27 +08:00
Parent c89c62b914
Commit 971ac206f3
44 changed files with 1228 additions and 360 deletions


@@ -30,7 +30,7 @@ class GetONNXGLMHandle(LocalLLMHandle):
         with open(os.path.expanduser('~/.cache/huggingface/token'), 'w') as f:
             f.write(huggingface_token)
         model_id = 'meta-llama/Llama-2-7b-chat-hf'
-        with ProxyNetworkActivate():
+        with ProxyNetworkActivate('Download_LLM'):
             self._tokenizer = AutoTokenizer.from_pretrained(model_id, use_auth_token=huggingface_token)
             # use fp16
             model = AutoModelForCausalLM.from_pretrained(model_id, use_auth_token=huggingface_token).eval()
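
The only functional change in this hunk is that ProxyNetworkActivate now receives a task label, 'Download_LLM', so the proxy can be scoped to a named activity instead of applying unconditionally. The sketch below illustrates one way such a labelled context manager could work; the WHEN_TO_USE_PROXY allowlist, the PROXIES dict, and the exact environment-variable handling are assumptions made for illustration, not this commit's verified implementation.

import os

# Hypothetical stand-ins for the project's real configuration; the names
# WHEN_TO_USE_PROXY and PROXIES are assumptions made for this sketch.
WHEN_TO_USE_PROXY = ['Download_LLM']
PROXIES = {'http': 'http://127.0.0.1:7890', 'https': 'http://127.0.0.1:7890'}

class ProxyNetworkActivate:
    """Set proxy environment variables only while a whitelisted task runs."""

    def __init__(self, task=None):
        # No task label: the proxy applies unconditionally (the old behaviour).
        # With a label: the proxy applies only if the label is on the allowlist.
        self.valid = True if task is None else (task in WHEN_TO_USE_PROXY)

    def __enter__(self):
        if self.valid and PROXIES:
            os.environ['HTTP_PROXY'] = PROXIES['http']
            os.environ['HTTPS_PROXY'] = PROXIES['https']
        return self

    def __exit__(self, exc_type, exc_value, traceback):
        # Always clean up so the proxy does not leak into unrelated requests.
        os.environ.pop('HTTP_PROXY', None)
        os.environ.pop('HTTPS_PROXY', None)
        return False

# Usage mirroring the diff: the proxy is active only for the model download.
with ProxyNetworkActivate('Download_LLM'):
    pass  # e.g. AutoTokenizer.from_pretrained(model_id, use_auth_token=...)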