# This Dockerfile builds an environment WITHOUT local models. If you need local models such as chatglm, see docs/Dockerfile+ChatGLM
# - 1. Edit `config.py`
# - 2. Build:  docker build -t gpt-academic-nolocal-latex -f docs/Dockerfile+NoLocal+Latex .
# - 3. Run:    docker run -v /home/fuqingxu/arxiv_cache:/root/arxiv_cache --rm -it --net=host gpt-academic-nolocal-latex

FROM fuqingxu/python311_texlive_ctex:latest

# Set the working directory
WORKDIR /gpt

# Copy the project files into the image
COPY . .

ARG useProxyNetwork=''

# # # Uncomment the block below if you need a proxy network | delete from this line downward if you do not
# RUN apt-get update
# RUN apt-get install -y curl proxychains
# RUN $useProxyNetwork curl cip.cc
# RUN sed -i '$ d' /etc/proxychains.conf
# RUN sed -i '$ d' /etc/proxychains.conf
# RUN echo "socks5 127.0.0.1 10880" >> /etc/proxychains.conf
# ARG useProxyNetwork=proxychains
# # # Uncomment the block above if you need a proxy network | delete from this line upward if you do not

# Install Python dependencies
RUN $useProxyNetwork pip3 install -r requirements.txt -i https://mirrors.aliyun.com/pypi/simple

# Optional step: warm up modules
RUN python3 -c 'from check_proxy import warm_up_modules; warm_up_modules()'

# Launch
CMD ["python3", "-u", "main.py"]
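
# A usage sketch, not part of the original build steps: if you enable the proxychains
# block above (so that proxychains actually gets installed), the proxy can also be
# switched on from the command line, since a build arg overrides the default declared
# by `ARG useProxyNetwork=''`:
#   docker build --build-arg useProxyNetwork=proxychains \
#                -t gpt-academic-nolocal-latex -f docs/Dockerfile+NoLocal+Latex .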