Mirrored from
https://github.com/binary-husky/gpt_academic.git
Synced 2025-12-06 06:26:47 +00:00
Fix compatibility of local models on Windows
This commit is contained in:
@@ -8,7 +8,7 @@ import threading
 import importlib
 from toolbox import update_ui, get_conf
 from multiprocessing import Process, Pipe
-from .local_llm_class import LocalLLMHandle, get_local_llm_predict_fns, SingletonLocalLLM
+from .local_llm_class import LocalLLMHandle, get_local_llm_predict_fns

 from .chatglmoonx import ChatGLMModel, chat_template

@@ -17,7 +17,6 @@ from .chatglmoonx import ChatGLMModel, chat_template

 # ------------------------------------------------------------------------------------------------------------------------
 # 🔌💻 Local Model
 # ------------------------------------------------------------------------------------------------------------------------
-@SingletonLocalLLM
 class GetONNXGLMHandle(LocalLLMHandle):

     def load_model_info(self):
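For context, below is a minimal sketch of what a class-decorator singleton such as SingletonLocalLLM typically looks like. This is an assumption for illustration only; the actual implementation lives in .local_llm_class and is not shown in this diff, and the names singleton_local_llm and DemoHandle are hypothetical.

_instances = {}

def singleton_local_llm(cls):
    # Hypothetical stand-in for SingletonLocalLLM: cache the first instance
    # of the decorated handle class so the underlying model is loaded once.
    def get_instance(*args, **kwargs):
        if cls not in _instances:
            _instances[cls] = cls(*args, **kwargs)
        return _instances[cls]
    return get_instance

@singleton_local_llm
class DemoHandle:
    def __init__(self):
        # A real handle would load the ONNX model and start worker processes here.
        print("loading model once")

a = DemoHandle()
b = DemoHandle()
assert a is b  # both names refer to the same cached instance

One side effect of this pattern is that the module-level name of the decorated class is rebound to a wrapper function, so instances can no longer be pickled by class reference. On Windows, where multiprocessing starts child processes with the spawn method and relies on pickling, that is a plausible source of the incompatibility this commit targets, though the diff itself only shows the decorator and its import being removed.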