# Dockerfile

## Maven + Spring Boot

FROM maven:3-openjdk-16
WORKDIR /code
# Fetch a Maven mirror configuration (settings.xml) into the local repository directory
RUN mkdir -p /root/.m2 && curl -sL https://blog.lyh543.cn/mirrors/maven.xml -o /root/.m2/settings.xml
# Copy only pom.xml and resolve dependencies first (all real build steps skipped),
# so this layer stays cached as long as the dependencies do not change
COPY pom.xml pom.xml
RUN mvn clean package -Dmaven.test.skip -Dmaven.main.skip -Dspring-boot.repackage.skip && rm -r target/
# Then copy the sources and do the real build
COPY src ./src
RUN mvn clean package -Dmaven.test.skip
EXPOSE 9091
CMD ["java", "-jar", "target/Aurora-DriveSyncer-0.0.1-SNAPSHOT.jar"]

## npm

FROM node:14.1-alpine AS builder
WORKDIR /code
RUN npm config set registry https://registry.npm.taobao.org
COPY package.json package-lock.json ./
RUN npm install
COPY . ./
EXPOSE 3000
CMD ["npm", "run", "start"]

## RubyGems + Jekyll

FROM ruby:2-buster
RUN mkdir /srv/jekyll && \
    gem sources --add https://gems.ruby-china.com/ --remove https://rubygems.org/ && \
    bundle config mirror.https://rubygems.org https://gems.ruby-china.com
WORKDIR /srv/jekyll
COPY Gemfile Gemfile.lock ./
RUN bundle install
EXPOSE 4000
CMD [ "bundle", "exec", "jekyll", "serve", "--host=0.0.0.0", "--port=4000", "--watch", "--force-polling" ]

To get hot reload, the blog files cannot be COPY'd into the image; they have to be mounted as a volume when the container is run (just write the mount into docker-compose.yaml, as below).

version: "3.9"
services:
  jekyll:
    build: .
    volumes:
      - .:/srv/jekyll
    ports:
      - "4000:4000"

## Pipenv + Django + MySQL

Dockerizing Django splits into two cases. In the first, the migration files are committed to Git; then it is enough to put `python3 manage.py migrate && gunicorn ...` into the Dockerfile's CMD, roughly as in the sketch below.
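A minimal sketch of that CMD, reusing the gunicorn invocation from the Dockerfile further down; the `sh -c` wrapper is what allows the `&&` chain, and `exec` hands PID 1 over to gunicorn:

```dockerfile
# Sketch only: run migrations at container start, then exec gunicorn.
CMD ["sh", "-c", "python3 manage.py migrate && exec gunicorn -c uestcmsc_webapp_backend/gunicorn.conf.py uestcmsc_webapp_backend.wsgi"]
```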

In the second case the migrations are gitignored, which is more awkward: the files generated by makemigrations need to be persisted, and running makemigrations inside the container does not persist them. The approach I settled on is to keep a complete Python + Pipenv environment both outside and inside Docker: run makemigrations + migrate + collectstatic outside (see the sketch below) and run only gunicorn inside.
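The host-side half of that workflow is just the usual Django management commands run through Pipenv; a sketch, assuming the same Pipfile is installed on the host:

```sh
# On the host, outside Docker:
pipenv run python3 manage.py makemigrations
pipenv run python3 manage.py migrate
pipenv run python3 manage.py collectstatic --noinput
```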

That in turn raises another problem: the database and Redis hosts are different inside and outside Docker. My solution is to have Python read HOST and the other settings uniformly from environment variables (and a .env file), so that docker-compose can override the values used outside Docker. See the Dockerfile and docker-compose.yaml of the uestcmsc_webapp_backend project for a full example.
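For instance, the backend service in docker-compose can override those variables with the Compose service names; a sketch, where `DATABASE_HOST` and `REDIS_HOST` are hypothetical names (use whatever settings.py actually reads):

```yaml
services:
  backend:
    build: .
    environment:
      DATABASE_HOST: db     # hypothetical variable; inside the Compose network MySQL is reachable as "db"
      REDIS_HOST: redis     # hypothetical variable; Redis is reachable as "redis"
  db:
    image: mysql:8
  redis:
    image: redis:6
```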

If you go with the first approach, there is still one more issue: static files need to be served directly by a web server such as nginx. Either add another nginx inside Docker, or mount the static folder out to the host (as sketched below); otherwise you still end up maintaining a Python + Pipenv environment outside Docker.
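A sketch of the "mount the static folder out of the container" option, so that an nginx on the host can serve the collected files directly (paths are placeholders):

```yaml
services:
  backend:
    build: .
    volumes:
      - ./static:/code/static   # point nginx's root/alias at ./static on the host
```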

FROM python:3.8
WORKDIR /code
RUN apt-get update \
    && apt-get install -y python3-dev default-libmysqlclient-dev build-essential
RUN pip3 config set global.index-url http://pypi.doubanio.com/simple \
    && pip3 config set global.trusted-host pypi.doubanio.com \
    && pip3 install pipenv
COPY Pipfile Pipfile.lock ./
RUN pipenv install --system --deploy
COPY . ./
EXPOSE 8000
CMD ["gunicorn","-c", "uestcmsc_webapp_backend/gunicorn.conf.py","uestcmsc_webapp_backend.wsgi"]

# docker-compose

## Building from a local or Git-hosted Dockerfile

version: "3.9"
services:
  frontend:
    build: https://github.com/Aurora-DriveSyncer/Aurora-DriveSyncer-frontend.git
    ports:
      - "3000:3000"
  backend:
    build: ../Aurora-DriveSyncer-backend
    ports:
      - "9091:9091"

## FTP

ftp:
  image: fauria/vsftpd
  environment:
    FTP_USER: "user"
    FTP_PASS: "user"
    # LOG_STDOUT: "1"
    PASV_MIN_PORT: "21000"   # passive-mode data port range; must match the port mapping below
    PASV_MAX_PORT: "21010"
    PASV_ADDRESS: "127.0.0.1"   # address advertised to clients for passive connections
    REVERSE_LOOKUP_ENABLE: "NO"
  ports:
    - "20-21:20-21"
    - "21000-21010:21000-21010"
  restart: always

## WebDAV + persistence

webdav:
  image: bytemark/webdav
  restart: always
  ports:
    - "80:80"
  environment:
    AUTH_TYPE: Digest
    USERNAME: alice
    PASSWORD: secret1234
  volumes:
    - ~/git/project/data/webdav:/var/lib/dav

## MySQL + persistence

db:
  image: mysql:8
  restart: always
  # network_mode: host
  ports:
    - "3306:3306"
  environment:
    TZ: 'Asia/Shanghai'
    MYSQL_ROOT_PASSWORD: secret1234
  volumes:
    - "~/git/project/data/db:/var/lib/mysql"

## PostgreSQL + persistence

db:
  image: postgres:14
  restart: always
  ports:
  - "5432:5432"
  environment:
    TZ: 'Asia/Shanghai'
    POSTGRES_USER: myuser
    POSTGRES_PASSWORD: mypassword
    POSTGRES_DB: mydb
  volumes:
    - ./data:/var/lib/postgresql/data

## Redis + persistence

redis:
  image: redis:6
  command: redis-server --appendonly yes --requirepass secret1234
  restart: always
  # network_mode: host
  ports:
    - "6379:6379"
  volumes:
    - "~/git/project/data/redis:/data"