Mirrored from https://github.com/binary-husky/gpt_academic.git
Synced 2025-12-06 22:46:48 +00:00
@@ -19,7 +19,7 @@ def get_core_functions():
         # Whether the button is visible (default True, i.e. visible)
         "Visible": True,
         # Whether to clear the history when triggered (default False, i.e. keep the previous conversation history)
-        "AutoClearHistory": True
+        "AutoClearHistory": False
     },
     "中文学术润色": {
         "Prefix": r"作为一名中文学术论文写作改进助理,你的任务是改进所提供文本的拼写、语法、清晰、简洁和整体可读性," +
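For orientation, one entry of the dict returned by get_core_functions() has roughly the shape below. This is a minimal sketch, not code from the repository: the button name and prompt strings are invented, while Prefix, Suffix, Visible, and AutoClearHistory are the keys the hunk above documents.

# Sketch of a single core-function entry (hypothetical name and prompt text).
{
    "EnglishPolish": {
        "Prefix": "Polish the following academic text:\n\n",  # prepended to the user input
        "Suffix": "",                                          # appended to the user input
        "Visible": True,             # show the button in the UI (default True)
        "AutoClearHistory": False,   # clear prior dialogue on trigger (default False)
    },
}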
@@ -83,11 +83,12 @@ def get_core_functions():
     }


-def handle_core_functionality(additional_fn, inputs, history):
+def handle_core_functionality(additional_fn, inputs, history, chatbot):
     import core_functional
     importlib.reload(core_functional)    # hot-reload the prompts
     core_functional = core_functional.get_core_functions()
     if "PreProcess" in core_functional[additional_fn]: inputs = core_functional[additional_fn]["PreProcess"](inputs)  # fetch the pre-processing function (if any)
     inputs = core_functional[additional_fn]["Prefix"] + inputs + core_functional[additional_fn]["Suffix"]
-    history = [] if core_functional[additional_fn].get("AutoClearHistory", False) else history
+    if core_functional[additional_fn].get("AutoClearHistory", False):
+        history = []
     return inputs, history
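Every remaining hunk is the matching call-site update: the per-file headers were lost in this mirror view, but each hunk belongs to a different predict implementation, and each adds the new chatbot argument to the same one-line call. A minimal usage sketch, assuming a Gradio-style chatbot state object:

# Hypothetical call site. chatbot is threaded through even though the body of
# handle_core_functionality shown above does not use it yet.
inputs, history = handle_core_functionality(
    additional_fn="中文学术润色",    # a key of get_core_functions()
    inputs="raw user text",
    history=["earlier question", "earlier answer"],
    chatbot=chatbot,                 # UI state, currently passed along unused
)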
@@ -145,7 +145,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp

     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

     # process the conversation history
     history_feedin = []
@@ -186,7 +186,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp

     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

     # process the conversation history
     history_feedin = []
@@ -130,7 +130,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp

     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

     raw_input = inputs
     logging.info(f'[raw_input] {raw_input}')
@@ -117,7 +117,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp

     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

     raw_input = inputs
     logging.info(f'[raw_input] {raw_input}')
@@ -291,7 +291,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp

     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

     # process the conversation history
     history_feedin = []
@@ -155,7 +155,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp

     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

     # process the conversation history
     history_feedin = []
@@ -155,7 +155,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp

     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

     # process the conversation history
     history_feedin = []
@@ -155,7 +155,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp

     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

     # process the conversation history
     history_feedin = []
@@ -225,7 +225,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp

     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

     # process the conversation history
     history_feedin = []
@@ -225,7 +225,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp

     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

     history_feedin = []
     for i in range(len(history)//2):
@@ -249,7 +249,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp

     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

     history_feedin = []
     for i in range(len(history)//2):
@@ -97,7 +97,7 @@ def predict(inputs, llm_kwargs, plugin_kwargs, chatbot, history=[], system_promp
     """
     if additional_fn is not None:
         from core_functional import handle_core_functionality
-        inputs, history = handle_core_functionality(additional_fn, inputs, history)
+        inputs, history = handle_core_functionality(additional_fn, inputs, history, chatbot)

     raw_input = "What I would like to say is the following: " + inputs
     history.extend([inputs, ""])
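Taken together, the two core changes mean a function button wraps the input with its Prefix/Suffix and, when AutoClearHistory is set, wipes the dialogue history on activation. A self-contained behavioral sketch (standalone names, not the repository's module layout):

# Standalone re-implementation of the behavior, for illustration only.
def handle_core_functionality_sketch(fn_table, additional_fn, inputs, history, chatbot):
    entry = fn_table[additional_fn]
    if "PreProcess" in entry:                 # optional pre-processing hook
        inputs = entry["PreProcess"](inputs)
    inputs = entry["Prefix"] + inputs + entry["Suffix"]
    if entry.get("AutoClearHistory", False):  # new: drop the prior dialogue
        history = []
    return inputs, history

fn_table = {"Summarize": {"Prefix": "Summarize:\n", "Suffix": "", "AutoClearHistory": True}}
new_inputs, new_history = handle_core_functionality_sketch(
    fn_table, "Summarize", "some text", ["old q", "old a"], chatbot=None)
assert new_inputs == "Summarize:\nsome text" and new_history == []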