比较提交

...

5 次代码提交

作者 SHA1 备注 提交日期
qingxu fu
1253a2b0a6 修正错误地把重名路径当成文件的bug 2023-11-23 15:37:00 +08:00
binary-husky
71537b570f Merge pull request #1315 from Harry67Hu/master
fix MacOS-zip bug
2023-11-22 16:49:22 +08:00
Harry67Hu
7754215dad fix MacOS-zip bug 2023-11-22 15:23:23 +08:00
qingxu fu
c7a0a5f207 引入更稳定的自动更新URL 2023-11-22 01:40:40 +08:00
qingxu fu
b1be05009b 移除冗余代码,修复多用户存档问题 2023-11-20 01:06:19 +08:00
共有 8 个文件被更改,包括 33 次插入和 44 次删除

查看文件

@@ -1,10 +0,0 @@
# See https://pre-commit.com for more information
# See https://pre-commit.com/hooks.html for more hooks
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v3.2.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
- id: check-yaml
- id: check-added-large-files

查看文件

@@ -5,7 +5,6 @@ def check_proxy(proxies):
try:
response = requests.get("https://ipapi.co/json/", proxies=proxies, timeout=4)
data = response.json()
# print(f'查询代理的地理位置,返回的结果是{data}')
if 'country_name' in data:
country = data['country_name']
result = f"代理配置 {proxies_https}, 代理所在地:{country}"
@@ -47,8 +46,8 @@ def backup_and_download(current_version, remote_version):
os.makedirs(new_version_dir)
shutil.copytree('./', backup_dir, ignore=lambda x, y: ['history'])
proxies = get_conf('proxies')
r = requests.get(
'https://github.com/binary-husky/chatgpt_academic/archive/refs/heads/master.zip', proxies=proxies, stream=True)
try: r = requests.get('https://github.com/binary-husky/chatgpt_academic/archive/refs/heads/master.zip', proxies=proxies, stream=True)
except: r = requests.get('https://public.gpt-academic.top/publish/master.zip', proxies=proxies, stream=True)
zip_file_path = backup_dir+'/master.zip'
with open(zip_file_path, 'wb+') as f:
f.write(r.content)
@@ -111,11 +110,10 @@ def auto_update(raise_error=False):
try:
from toolbox import get_conf
import requests
import time
import json
proxies = get_conf('proxies')
response = requests.get(
"https://raw.githubusercontent.com/binary-husky/chatgpt_academic/master/version", proxies=proxies, timeout=5)
try: response = requests.get("https://raw.githubusercontent.com/binary-husky/chatgpt_academic/master/version", proxies=proxies, timeout=5)
except: response = requests.get("https://public.gpt-academic.top/publish/version", proxies=proxies, timeout=5)
remote_json_data = json.loads(response.text)
remote_version = remote_json_data['version']
if remote_json_data["show_feature"]:
@@ -127,8 +125,7 @@ def auto_update(raise_error=False):
current_version = json.loads(current_version)['version']
if (remote_version - current_version) >= 0.01-1e-5:
from colorful import print亮黄
print亮黄(
f'\n新版本可用。新版本:{remote_version},当前版本:{current_version}{new_feature}')
print亮黄(f'\n新版本可用。新版本:{remote_version},当前版本:{current_version}{new_feature}')
print('1Github更新地址:\nhttps://github.com/binary-husky/chatgpt_academic\n')
user_instruction = input('2是否一键更新代码Y+回车=确认,输入其他/无输入+回车=不更新)?')
if user_instruction in ['Y', 'y']:
@@ -154,7 +151,7 @@ def auto_update(raise_error=False):
print(msg)
def warm_up_modules():
print('正在执行一些模块的预热...')
print('正在执行一些模块的预热 ...')
from toolbox import ProxyNetworkActivate
from request_llms.bridge_all import model_info
with ProxyNetworkActivate("Warmup_Modules"):

查看文件

@@ -235,11 +235,6 @@ BLOCK_INVALID_APIKEY = False
# 自定义按钮的最大数量限制
NUM_CUSTOM_BASIC_BTN = 4
# LATEX实验性功能
LATEX_EXPERIMENTAL = False
"""
在线大模型配置关联关系示意图

查看文件

@@ -73,6 +73,7 @@ def move_project(project_folder, arxiv_id=None):
# align subfolder if there is a folder wrapper
items = glob.glob(pj(project_folder,'*'))
items = [item for item in items if os.path.basename(item)!='__MACOSX']
if len(glob.glob(pj(project_folder,'*.tex'))) == 0 and len(items) == 1:
if os.path.isdir(items[0]): project_folder = items[0]
@@ -214,7 +215,6 @@ def Latex英文纠错加PDF对比(txt, llm_kwargs, plugin_kwargs, chatbot, histo
# <-------------- we are done ------------->
return success
# ========================================= 插件主程序2 =====================================================
@CatchException

查看文件

@@ -95,11 +95,14 @@ class LatexPaperSplit():
self.abstract = "unknown"
def read_title_and_abstract(self, txt):
title, abstract = find_title_and_abs(txt)
if title is not None:
self.title = title.replace('\n', ' ').replace('\\\\', ' ').replace(' ', '').replace(' ', '')
if abstract is not None:
self.abstract = abstract.replace('\n', ' ').replace('\\\\', ' ').replace(' ', '').replace(' ', '')
try:
title, abstract = find_title_and_abs(txt)
if title is not None:
self.title = title.replace('\n', ' ').replace('\\\\', ' ').replace(' ', '').replace(' ', '')
if abstract is not None:
self.abstract = abstract.replace('\n', ' ').replace('\\\\', ' ').replace(' ', '').replace(' ', '')
except:
pass
def merge_result(self, arr, mode, msg, buggy_lines=[], buggy_line_surgery_n_lines=10):
"""
@@ -265,12 +268,12 @@ def Latex精细分解与转化(file_manifest, project_folder, llm_kwargs, plugin
else:
# <-------- gpt 多线程请求 ---------->
LATEX_EXPERIMENTAL, = get_conf('LATEX_EXPERIMENTAL')
history_array = [[""] for _ in range(n_split)]
if LATEX_EXPERIMENTAL:
paper_meta = f"The paper you processing is `{lps.title}`, a part of the abstraction is `{lps.abstract}`"
paper_meta_max_len = 888
history_array = [[ paper_meta[:paper_meta_max_len] + '...', "Understand, what should I do?"] for _ in range(n_split)]
# LATEX_EXPERIMENTAL, = get_conf('LATEX_EXPERIMENTAL')
# if LATEX_EXPERIMENTAL:
# paper_meta = f"The paper you processing is `{lps.title}`, a part of the abstraction is `{lps.abstract}`"
# paper_meta_max_len = 888
# history_array = [[ paper_meta[:paper_meta_max_len] + '...', "Understand, what should I do?"] for _ in range(n_split)]
gpt_response_collection = yield from request_gpt_model_multi_threads_with_very_awesome_ui_and_high_efficiency(
inputs_array=inputs_array,

查看文件

@@ -283,10 +283,10 @@ def find_tex_file_ignore_case(fp):
dir_name = os.path.dirname(fp)
base_name = os.path.basename(fp)
# 如果输入的文件路径是正确的
if os.path.exists(pj(dir_name, base_name)): return pj(dir_name, base_name)
if os.path.isfile(pj(dir_name, base_name)): return pj(dir_name, base_name)
# 如果不正确,试着加上.tex后缀试试
if not base_name.endswith('.tex'): base_name+='.tex'
if os.path.exists(pj(dir_name, base_name)): return pj(dir_name, base_name)
if os.path.isfile(pj(dir_name, base_name)): return pj(dir_name, base_name)
# 如果还找不到,解除大小写限制,再试一次
import glob
for f in glob.glob(dir_name+'/*.tex'):
@@ -352,6 +352,7 @@ def find_title_and_abs(main_file):
title = extract_title(main_file)
return title, abstract
def merge_tex_files(project_foler, main_file, mode):
"""
Merge Tex project recrusively

查看文件

@@ -544,14 +544,16 @@ def find_recent_files(directory):
def file_already_in_downloadzone(file, user_path):
parent_path = user_path
child_path = file
if os.path.commonpath([parent_path, child_path]) == parent_path:
return True
else:
try:
parent_path = os.path.abspath(user_path)
child_path = os.path.abspath(file)
if os.path.samefile(os.path.commonpath([parent_path, child_path]), parent_path):
return True
else:
return False
except:
return False
def promote_file_to_downloadzone(file, rename_file=None, chatbot=None):
# 将文件复制一份到下载区
import shutil
@@ -564,6 +566,7 @@ def promote_file_to_downloadzone(file, rename_file=None, chatbot=None):
if file_already_in_downloadzone(file, user_path):
new_path = file
else:
user_path = get_log_folder(user_name, plugin_name='downloadzone')
if rename_file is None: rename_file = f'{gen_time_str()}-{os.path.basename(file)}'
new_path = pj(user_path, rename_file)
# 如果已经存在,先删除

查看文件

@@ -1,5 +1,5 @@
{
"version": 3.60,
"show_feature": true,
"new_feature": "11月12日紧急BUG修复 <-> AutoGen多智能体插件测试版 <-> 修复本地模型在Windows下的加载BUG <-> 支持文心一言v4和星火v3 <-> 支持GLM3和智谱的API <-> 解决本地模型并发BUG <-> 支持动态追加基础功能按钮 <-> 新汇报PDF汇总页面 <-> 重新编译Gradio优化使用体验"
"new_feature": "修复多个BUG <-> AutoGen多智能体插件测试版 <-> 修复本地模型在Windows下的加载BUG <-> 支持文心一言v4和星火v3 <-> 支持GLM3和智谱的API <-> 解决本地模型并发BUG <-> 支持动态追加基础功能按钮 <-> 新汇报PDF汇总页面 <-> 重新编译Gradio优化使用体验"
}