Mirrored from https://github.com/binary-husky/gpt_academic.git
Synced 2025-12-06 06:26:47 +00:00
Compare commits
2 commits
version3.5...version3.6
| Author | SHA1 | Commit Date |
|---|---|---|
|  | c7a0a5f207 |  |
|  | b1be05009b |  |
.pre-commit-config.yaml (deleted)

```diff
@@ -1,10 +0,0 @@
-# See https://pre-commit.com for more information
-# See https://pre-commit.com/hooks.html for more hooks
-repos:
--   repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v3.2.0
-    hooks:
-    -   id: trailing-whitespace
-    -   id: end-of-file-fixer
-    -   id: check-yaml
-    -   id: check-added-large-files
```
check_proxy.py

```diff
@@ -5,7 +5,6 @@ def check_proxy(proxies):
     try:
         response = requests.get("https://ipapi.co/json/", proxies=proxies, timeout=4)
         data = response.json()
-        # print(f'查询代理的地理位置,返回的结果是{data}')
         if 'country_name' in data:
             country = data['country_name']
             result = f"代理配置 {proxies_https}, 代理所在地:{country}"
```
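The probe in this hunk reduces to a short standalone sketch. It assumes only what the hunk itself shows: `https://ipapi.co/json/` answers with a JSON document carrying a `country_name` field. The helper name `lookup_country` is hypothetical, and the failure handling is narrowed to `requests.RequestException` rather than the source's broader handling:

```python
import requests

def lookup_country(proxies=None):
    # Same geolocation probe as check_proxy: ipapi.co echoes back the
    # caller's apparent location as JSON, including 'country_name'.
    try:
        data = requests.get("https://ipapi.co/json/", proxies=proxies, timeout=4).json()
        return data.get("country_name", "unknown")
    except requests.RequestException:
        return "unreachable"
```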
```diff
@@ -47,8 +46,8 @@ def backup_and_download(current_version, remote_version):
     os.makedirs(new_version_dir)
     shutil.copytree('./', backup_dir, ignore=lambda x, y: ['history'])
     proxies = get_conf('proxies')
-    r = requests.get(
-        'https://github.com/binary-husky/chatgpt_academic/archive/refs/heads/master.zip', proxies=proxies, stream=True)
+    try: r = requests.get('https://github.com/binary-husky/chatgpt_academic/archive/refs/heads/master.zip', proxies=proxies, stream=True)
+    except: r = requests.get('https://public.gpt-academic.top/publish/master.zip', proxies=proxies, stream=True)
     zip_file_path = backup_dir+'/master.zip'
     with open(zip_file_path, 'wb+') as f:
         f.write(r.content)
```
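The change above introduces a primary/fallback mirror pattern: if the GitHub archive is unreachable (typically behind a restrictive network), the download retries against `https://public.gpt-academic.top/publish/master.zip`. A minimal standalone sketch of the same pattern follows; `download_with_fallback` is a hypothetical name, and unlike the diff's bare `except` it catches only `requests.RequestException` (including HTTP error statuses via `raise_for_status`), so keyboard interrupts still propagate:

```python
import requests

def download_with_fallback(primary_url, fallback_url, proxies=None, timeout=30):
    # Try the primary mirror; on any network or HTTP failure, retry the fallback.
    try:
        r = requests.get(primary_url, proxies=proxies, stream=True, timeout=timeout)
        r.raise_for_status()
        return r
    except requests.RequestException:
        return requests.get(fallback_url, proxies=proxies, stream=True, timeout=timeout)
```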
```diff
@@ -111,11 +110,10 @@ def auto_update(raise_error=False):
     try:
         from toolbox import get_conf
         import requests
-        import time
        import json
         proxies = get_conf('proxies')
-        response = requests.get(
-            "https://raw.githubusercontent.com/binary-husky/chatgpt_academic/master/version", proxies=proxies, timeout=5)
+        try: response = requests.get("https://raw.githubusercontent.com/binary-husky/chatgpt_academic/master/version", proxies=proxies, timeout=5)
+        except: response = requests.get("https://public.gpt-academic.top/publish/version", proxies=proxies, timeout=5)
         remote_json_data = json.loads(response.text)
         remote_version = remote_json_data['version']
         if remote_json_data["show_feature"]:
```
```diff
@@ -127,8 +125,7 @@ def auto_update(raise_error=False):
         current_version = json.loads(current_version)['version']
         if (remote_version - current_version) >= 0.01-1e-5:
             from colorful import print亮黄
-            print亮黄(
-                f'\n新版本可用。新版本:{remote_version},当前版本:{current_version}。{new_feature}')
+            print亮黄(f'\n新版本可用。新版本:{remote_version},当前版本:{current_version}。{new_feature}')
             print('(1)Github更新地址:\nhttps://github.com/binary-husky/chatgpt_academic\n')
             user_instruction = input('(2)是否一键更新代码(Y+回车=确认,输入其他/无输入+回车=不更新)?')
             if user_instruction in ['Y', 'y']:
```
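A side note on the unchanged condition `(remote_version - current_version) >= 0.01-1e-5`: both versions are parsed as floats, and float subtraction is inexact, so comparing against a bare `0.01` can miss a genuine one-step bump. The snippet below demonstrates the failure mode the `1e-5` slack guards against (the 3.57/3.56 pair is an illustrative choice, not taken from the repository):

```python
remote_version, current_version = 3.57, 3.56

diff = remote_version - current_version
print(diff)                 # 0.009999999999999787 on IEEE-754 doubles
print(diff >= 0.01)         # False -- the update prompt would never fire
print(diff >= 0.01 - 1e-5)  # True  -- the epsilon absorbs the rounding error
```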
```diff
@@ -154,7 +151,7 @@ def auto_update(raise_error=False):
         print(msg)
 
 def warm_up_modules():
-    print('正在执行一些模块的预热...')
+    print('正在执行一些模块的预热 ...')
     from toolbox import ProxyNetworkActivate
     from request_llms.bridge_all import model_info
     with ProxyNetworkActivate("Warmup_Modules"):
```
config.py

```diff
@@ -235,11 +235,6 @@ BLOCK_INVALID_APIKEY = False
 # 自定义按钮的最大数量限制
 NUM_CUSTOM_BASIC_BTN = 4
 
-
-# LATEX实验性功能
-LATEX_EXPERIMENTAL = False
-
-
 """
 在线大模型配置关联关系示意图
 │
```
```diff
@@ -95,11 +95,14 @@ class LatexPaperSplit():
         self.abstract = "unknown"
 
     def read_title_and_abstract(self, txt):
-        title, abstract = find_title_and_abs(txt)
-        if title is not None:
-            self.title = title.replace('\n', ' ').replace('\\\\', ' ').replace('  ', ' ').replace('  ', ' ')
-        if abstract is not None:
-            self.abstract = abstract.replace('\n', ' ').replace('\\\\', ' ').replace('  ', ' ').replace('  ', ' ')
+        try:
+            title, abstract = find_title_and_abs(txt)
+            if title is not None:
+                self.title = title.replace('\n', ' ').replace('\\\\', ' ').replace('  ', ' ').replace('  ', ' ')
+            if abstract is not None:
+                self.abstract = abstract.replace('\n', ' ').replace('\\\\', ' ').replace('  ', ' ').replace('  ', ' ')
+        except:
+            pass
 
     def merge_result(self, arr, mode, msg, buggy_lines=[], buggy_line_surgery_n_lines=10):
         """
```
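Reading the chained replacements as whitespace normalization (newlines, TeX `\\` forced line breaks, and doubled spaces all flattened to single spaces), an equivalent regex-based sketch would be the following; `normalize_tex_fragment` is a hypothetical name, not the project's code:

```python
import re

def normalize_tex_fragment(s: str) -> str:
    # Equivalent intent to the chained .replace() calls above:
    # drop TeX forced line breaks, then collapse all whitespace runs.
    s = s.replace('\\\\', ' ')
    return re.sub(r'\s+', ' ', s).strip()
```

Wrapping the whole read in try/except, which is the actual change, keeps a malformed preamble from aborting the plugin: on failure, title and abstract simply stay at their `"unknown"` defaults.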
```diff
@@ -265,12 +268,12 @@ def Latex精细分解与转化(file_manifest, project_folder, llm_kwargs, plugin
 
     else:
         # <-------- gpt 多线程请求 ---------->
-        LATEX_EXPERIMENTAL, = get_conf('LATEX_EXPERIMENTAL')
         history_array = [[""] for _ in range(n_split)]
-        if LATEX_EXPERIMENTAL:
-            paper_meta = f"The paper you processing is `{lps.title}`, a part of the abstraction is `{lps.abstract}`"
-            paper_meta_max_len = 888
-            history_array = [[ paper_meta[:paper_meta_max_len] + '...', "Understand, what should I do?"] for _ in range(n_split)]
+        # LATEX_EXPERIMENTAL, = get_conf('LATEX_EXPERIMENTAL')
+        # if LATEX_EXPERIMENTAL:
+        #     paper_meta = f"The paper you processing is `{lps.title}`, a part of the abstraction is `{lps.abstract}`"
+        #     paper_meta_max_len = 888
+        #     history_array = [[ paper_meta[:paper_meta_max_len] + '...', "Understand, what should I do?"] for _ in range(n_split)]
 
     gpt_response_collection = yield from request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency(
         inputs_array=inputs_array,
```
```diff
@@ -352,6 +352,7 @@ def find_title_and_abs(main_file):
     title = extract_title(main_file)
     return title, abstract
 
+
 def merge_tex_files(project_foler, main_file, mode):
     """
     Merge Tex project recrusively
```
toolbox.py (15 changed lines)
```diff
@@ -544,14 +544,16 @@ def find_recent_files(directory):
 
 
 def file_already_in_downloadzone(file, user_path):
-    parent_path = user_path
-    child_path = file
-    if os.path.commonpath([parent_path, child_path]) == parent_path:
-        return True
-    else:
+    try:
+        parent_path = os.path.abspath(user_path)
+        child_path = os.path.abspath(file)
+        if os.path.samefile(os.path.commonpath([parent_path, child_path]), parent_path):
+            return True
+        else:
+            return False
+    except:
         return False
 
 
 def promote_file_to_downloadzone(file, rename_file=None, chatbot=None):
     # 将文件复制一份到下载区
     import shutil
```
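The rewrite hardens the containment check in three ways: `os.path.abspath` normalizes relative paths before comparison; `os.path.samefile` compares filesystem identity instead of raw strings, so a symlinked spelling of the download zone still matches; and the try/except absorbs the cases where the primitives raise, since `os.path.commonpath` raises `ValueError` when given a mix of absolute and relative paths (or paths on different drives, on Windows) and `os.path.samefile` raises `OSError` when a path does not exist. A small demonstration of the first failure mode, using an illustrative path:

```python
import os

parent = "gpt_log/downloadzone"            # relative, as a config might supply it
child = os.path.abspath("gpt_log/downloadzone/report.pdf")

# Mixing relative and absolute paths makes commonpath raise ValueError,
# which the old string-comparison version had no protection against:
try:
    os.path.commonpath([parent, child])
except ValueError as e:
    print("raises:", e)

# Normalizing both sides first, as the new code does, is well-defined:
print(os.path.commonpath([os.path.abspath(parent), child]))
```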
```diff
@@ -564,6 +566,7 @@ def promote_file_to_downloadzone(file, rename_file=None, chatbot=None):
     if file_already_in_downloadzone(file, user_path):
         new_path = file
     else:
+        user_path = get_log_folder(user_name, plugin_name='downloadzone')
         if rename_file is None: rename_file = f'{gen_time_str()}-{os.path.basename(file)}'
         new_path = pj(user_path, rename_file)
     # 如果已经存在,先删除
```
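Together, the two toolbox.py changes mean that a file already inside the user's download zone is left in place, while anything else is copied into a per-user `downloadzone` log folder under a timestamped name. A sketch of that naming scheme, assuming `gen_time_str` is a strftime-style timestamp helper (an assumption; this diff does not show its definition):

```python
import os
import time

def unique_download_name(file: str) -> str:
    # Timestamp-prefixed basename, mirroring f'{gen_time_str()}-{os.path.basename(file)}';
    # the exact gen_time_str format is assumed here.
    return f"{time.strftime('%Y-%m-%d-%H-%M-%S')}-{os.path.basename(file)}"

print(unique_download_name("/tmp/report.pdf"))  # e.g. 2023-11-12-08-30-00-report.pdf
```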
version (2 changed lines)
```diff
@@ -1,5 +1,5 @@
 {
     "version": 3.60,
     "show_feature": true,
-    "new_feature": "11月12日紧急BUG修复 <-> AutoGen多智能体插件测试版 <-> 修复本地模型在Windows下的加载BUG <-> 支持文心一言v4和星火v3 <-> 支持GLM3和智谱的API <-> 解决本地模型并发BUG <-> 支持动态追加基础功能按钮 <-> 新汇报PDF汇总页面 <-> 重新编译Gradio优化使用体验"
+    "new_feature": "修复多个BUG <-> AutoGen多智能体插件测试版 <-> 修复本地模型在Windows下的加载BUG <-> 支持文心一言v4和星火v3 <-> 支持GLM3和智谱的API <-> 解决本地模型并发BUG <-> 支持动态追加基础功能按钮 <-> 新汇报PDF汇总页面 <-> 重新编译Gradio优化使用体验"
 }
```
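This extensionless `version` file is plain JSON, which is what lets `auto_update` above do float arithmetic on the `version` field after `json.loads`, and gate the feature text on `show_feature`. A quick check of that round trip (the `new_feature` string is elided here):

```python
import json

meta = json.loads('{"version": 3.60, "show_feature": true, "new_feature": "..."}')
print(type(meta["version"]))  # <class 'float'> -- enables the (remote - current) >= 0.01 check
print(meta["show_feature"])   # True -- controls whether the new_feature text is displayed
```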