Mirrored from https://github.com/binary-husky/gpt_academic.git
Last synced: 2025-12-06 06:26:47 +00:00
change some open fn encoding to utf-8
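For context (not part of the commit itself): when no encoding argument is given, Python's open() falls back to locale.getpreferredencoding(), which is UTF-8 on most Linux/macOS setups but a legacy code page such as GBK/cp936 on Chinese Windows, so reading or writing non-ASCII content through these call sites can fail or be mis-decoded on some platforms. A minimal sketch of the failure mode and the fix; the file name demo.tag is hypothetical:

import locale

# The default text encoding used by open() when no encoding= is given; it is
# platform dependent (e.g. 'cp936' on Chinese Windows, 'utf-8' on most Linux).
print(locale.getpreferredencoding(False))

text = "标签 📌"  # non-ASCII content, similar to what the patched code paths handle

# Relying on the platform default may raise on Windows:
#   UnicodeEncodeError: 'gbk' codec can't encode character ...
# with open("demo.tag", "w") as f:
#     f.write(text)

# Pinning the encoding makes the behaviour identical on every platform.
with open("demo.tag", "w", encoding="utf-8") as f:
    f.write(text)

with open("demo.tag", "r", encoding="utf-8") as f:
    assert f.read() == text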
@@ -559,7 +559,7 @@ def PDF翻译中文并重新编译PDF(txt, llm_kwargs, plugin_kwargs, chatbot, h
     project_folder = move_project(project_folder)

     # <-------------- set a hash tag for repeat-checking ------------->
-    with open(pj(project_folder, hash_tag + '.tag'), 'w') as f:
+    with open(pj(project_folder, hash_tag + '.tag'), 'w', encoding='utf8') as f:
         f.write(hash_tag)
         f.close()

@@ -26,7 +26,7 @@ class GetLlamaHandle(LocalLLMHandle):
         import platform
         huggingface_token, device = get_conf('HUGGINGFACE_ACCESS_TOKEN', 'LOCAL_MODEL_DEVICE')
         assert len(huggingface_token) != 0, "没有填写 HUGGINGFACE_ACCESS_TOKEN"
-        with open(os.path.expanduser('~/.cache/huggingface/token'), 'w') as f:
+        with open(os.path.expanduser('~/.cache/huggingface/token'), 'w', encoding='utf8') as f:
             f.write(huggingface_token)
         model_id = 'meta-llama/Llama-2-7b-chat-hf'
         with ProxyNetworkActivate('Download_LLM'):
@@ -31,7 +31,7 @@ class MoonShotInit:
                 files.append(f)
         for file in files:
             if file.split('.')[-1] in ['pdf']:
-                with open(file, 'r') as fp:
+                with open(file, 'r', encoding='utf8') as fp:
                     from crazy_functions.crazy_utils import read_and_clean_pdf_text
                     file_content, _ = read_and_clean_pdf_text(fp)
                     what_ask.append({"role": "system", "content": file_content})
@@ -29,7 +29,7 @@ def minimize_js(common_js_path):
             os.remove(old_min_js)
         # use rjsmin to minimize `common_js_path`
         c_jsmin = rjsmin.jsmin
-        with open(common_js_path, "r") as f:
+        with open(common_js_path, "r", encoding='utf-8') as f:
             js_content = f.read()
         if common_js_path == "themes/common.js":
             js_content = inject_mutex_button_code(js_content)
@@ -38,7 +38,7 @@ def minimize_js(common_js_path):
         sha_hash = hashlib.sha256(minimized_js_content.encode()).hexdigest()[:8]
         minimized_js_path = common_js_path + '.min.' + sha_hash + '.js'
         # save to minimized js file
-        with open(minimized_js_path, "w") as f:
+        with open(minimized_js_path, "w", encoding='utf-8') as f:
             f.write(minimized_js_content)
         # return minimized js file path
         return minimized_js_path
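A possible follow-up check (an assumption on my part, not something this commit includes): on Python 3.10+ any remaining open() calls that still rely on the platform default encoding can be surfaced as EncodingWarning by starting the interpreter with -X warn_default_encoding. A sketch; the inline -c snippet stands in for any module of the project:

import subprocess, sys

# Run a snippet (or a real module) with the opt-in flag so that every open()
# call lacking an explicit encoding= is reported as an EncodingWarning,
# escalated to an error via -W so it cannot be missed (Python 3.10+).
result = subprocess.run(
    [sys.executable,
     "-X", "warn_default_encoding",
     "-W", "error::EncodingWarning",
     "-c", "open('demo.txt', 'w').close()"],
    capture_output=True, text=True,
)
print(result.stderr)  # ... EncodingWarning: 'encoding' argument not specified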