diff --git a/.github/workflows/build-with-latex-arm.yml b/.github/workflows/build-with-latex-arm.yml
index 2f00457a..ac20afad 100644
--- a/.github/workflows/build-with-latex-arm.yml
+++ b/.github/workflows/build-with-latex-arm.yml
@@ -46,6 +46,6 @@ jobs:
           context: .
           push: true
           platforms: linux/arm64
-          file: docs/GithubAction+NoLocal+Latex
+          file: docs/GithubAction+NoLocal+Latex+Arm
           tags: ${{ steps.meta.outputs.tags }}
           labels: ${{ steps.meta.outputs.labels }}
\ No newline at end of file
diff --git a/docs/GithubAction+NoLocal+Latex b/docs/GithubAction+NoLocal+Latex
index 533c6e35..00a3b6ab 100644
--- a/docs/GithubAction+NoLocal+Latex
+++ b/docs/GithubAction+NoLocal+Latex
@@ -3,19 +3,33 @@
 # - 2 Build: docker build -t gpt-academic-nolocal-latex -f docs/GithubAction+NoLocal+Latex .
 # - 3 Run: docker run -v /home/fuqingxu/arxiv_cache:/root/arxiv_cache --rm -it --net=host gpt-academic-nolocal-latex
 
-FROM menghuan1918/ubuntu_uv_ctex:latest
-ENV DEBIAN_FRONTEND=noninteractive
-SHELL ["/bin/bash", "-c"]
+FROM fuqingxu/python311_texlive_ctex:latest
+ENV PATH "$PATH:/usr/local/texlive/2022/bin/x86_64-linux"
+ENV PATH "$PATH:/usr/local/texlive/2023/bin/x86_64-linux"
+ENV PATH "$PATH:/usr/local/texlive/2024/bin/x86_64-linux"
+ENV PATH "$PATH:/usr/local/texlive/2025/bin/x86_64-linux"
+ENV PATH "$PATH:/usr/local/texlive/2026/bin/x86_64-linux"
+
+# Set the working directory
 WORKDIR /gpt
+
+RUN pip3 install openai numpy arxiv rich
+RUN pip3 install colorama Markdown pygments pymupdf
+RUN pip3 install python-docx pdfminer
+RUN pip3 install nougat-ocr
+
+# Copy the project files
 COPY . .
-RUN /root/.cargo/bin/uv venv --seed \
-    && source .venv/bin/activate \
-    && /root/.cargo/bin/uv pip install openai numpy arxiv rich colorama Markdown pygments pymupdf python-docx pdfminer \
-    && /root/.cargo/bin/uv pip install -r requirements.txt \
-    && /root/.cargo/bin/uv clean
+
+
+# Install dependencies
+RUN pip3 install -r requirements.txt
+
+# Dependency required by edge-tts
+RUN apt update && apt install ffmpeg -y
 
 # Optional step: warm up the modules
-RUN .venv/bin/python3 -c 'from check_proxy import warm_up_modules; warm_up_modules()'
+RUN python3 -c 'from check_proxy import warm_up_modules; warm_up_modules()'
 
 # Launch
-CMD [".venv/bin/python3", "-u", "main.py"]
+CMD ["python3", "-u", "main.py"]
diff --git a/docs/GithubAction+NoLocal+Latex+Arm b/docs/GithubAction+NoLocal+Latex+Arm
new file mode 100644
index 00000000..533c6e35
--- /dev/null
+++ b/docs/GithubAction+NoLocal+Latex+Arm
@@ -0,0 +1,21 @@
+# This Dockerfile is for building an environment without local models; if local models such as chatglm are needed, see docs/Dockerfile+ChatGLM
+# - 1 Edit `config.py`
+# - 2 Build: docker build -t gpt-academic-nolocal-latex -f docs/GithubAction+NoLocal+Latex .
+# - 3 Run: docker run -v /home/fuqingxu/arxiv_cache:/root/arxiv_cache --rm -it --net=host gpt-academic-nolocal-latex
+
+FROM menghuan1918/ubuntu_uv_ctex:latest
+ENV DEBIAN_FRONTEND=noninteractive
+SHELL ["/bin/bash", "-c"]
+WORKDIR /gpt
+COPY . .
+RUN /root/.cargo/bin/uv venv --seed \
+    && source .venv/bin/activate \
+    && /root/.cargo/bin/uv pip install openai numpy arxiv rich colorama Markdown pygments pymupdf python-docx pdfminer \
+    && /root/.cargo/bin/uv pip install -r requirements.txt \
+    && /root/.cargo/bin/uv clean
+
+# Optional step: warm up the modules
+RUN .venv/bin/python3 -c 'from check_proxy import warm_up_modules; warm_up_modules()'
+
+# Launch
+CMD [".venv/bin/python3", "-u", "main.py"]
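
Note (not part of the patch): a minimal sketch of how the new Arm Dockerfile could be built and run locally, assuming Docker Buildx with arm64 emulation (e.g. QEMU/binfmt) is available. The image tag gpt-academic-nolocal-latex-arm and the cache path are illustrative; CI itself builds this file through docker/build-push-action as shown in the workflow hunk above.

# Hypothetical local arm64 build of the new Dockerfile (tag name is an example)
docker buildx build --platform linux/arm64 -f docs/GithubAction+NoLocal+Latex+Arm -t gpt-academic-nolocal-latex-arm --load .
# Run it the same way the Dockerfile's header comments suggest, mounting the arxiv cache
docker run -v /home/fuqingxu/arxiv_cache:/root/arxiv_cache --rm -it --net=host gpt-academic-nolocal-latex-arm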