镜像自地址
https://github.com/binary-husky/gpt_academic.git
已同步 2025-12-06 14:36:48 +00:00
* logging sys to loguru: stage 1 complete * import loguru: stage 2 * logging -> loguru: stage 3 * support o1-preview and o1-mini * logging -> loguru stage 4 * update social helper * logging -> loguru: final stage * fix: console output * update translation matrix * fix: loguru argument error with proxy enabled (#1977) * relax llama index version * remove comment * Added some modules to support openrouter (#1975) * Added some modules for supporting openrouter model Added some modules for supporting openrouter model * Update config.py * Update .gitignore * Update bridge_openrouter.py * Not changed actually * Refactor logging in bridge_openrouter.py --------- Co-authored-by: binary-husky <qingxu.fu@outlook.com> * remove logging extra --------- Co-authored-by: Steven Moder <java20131114@gmail.com> Co-authored-by: Ren Lifei <2602264455@qq.com>
20 行
911 B
Python
20 行
911 B
Python
from crazy_functions.agent_fns.pipe import PluginMultiprocessManager, PipeCom
class EchoDemo(PluginMultiprocessManager):
    """Minimal demo plugin: echoes user input back over the IPC pipe."""

    def subprocess_worker(self, child_conn):
        """Child-process message loop.

        Receives ``PipeCom`` messages from the parent over *child_conn*,
        echoes "user_input" content back as a "show" message, and keeps
        looping until either the wait for further user feedback times out
        or the parent sends a "terminate" command.

        Args:
            child_conn: the child end of the multiprocessing pipe.
        """
        # ⭐⭐ runs inside the child process
        self.child_conn = child_conn
        keep_running = True
        while keep_running:
            ipc_msg = self.child_conn.recv()  # expected to be a PipeCom
            if ipc_msg.cmd == "terminate":
                # Parent asked us to stop: acknowledge, then exit the loop.
                self.child_conn.send(PipeCom("done", ""))
                keep_running = False
            elif ipc_msg.cmd == "user_input":
                # Echo the content, then block waiting for the next user input.
                self.child_conn.send(PipeCom("show", ipc_msg.content))
                got_feedback = self.subprocess_worker_wait_user_feedback(wait_msg="我准备好处理下一个问题了.")
                if not got_feedback:
                    # Feedback wait timed out: shut down this worker.
                    keep_running = False
        logger.info('[debug] subprocess_worker terminated')