Skip to content

Commit

Permalink
Revert "Refactoring multiple parts of the dockerfile (pypi#10004)" (pypi#10020)
Browse files Browse the repository at this point in the history

This reverts commit 6581b02.
  • Loading branch information
di authored and domdfcoding committed Jun 7, 2022
1 parent 8bd48df commit e31752d
Show file tree
Hide file tree
Showing 16 changed files with 132 additions and 196 deletions.
8 changes: 2 additions & 6 deletions .dockerignore
Original file line number Diff line number Diff line change
@@ -1,10 +1,6 @@
.git
.git/*
node_modules
dev
dev/*
**/*.pyc
htmlcov
warehouse/static/dist
.mypy_cache
.state
tests
.github
191 changes: 83 additions & 108 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,28 +1,18 @@
# ---------------------------------- STATIC ----------------------------------

# First things first, we build an image which is where we're going to compile
# our static assets with.
# our static assets with. It is important that the steps in this remain the
# same as the steps in Dockerfile.static, EXCEPT this may include additional
# steps appended onto the end.
FROM node:14.4.0 as static

WORKDIR /opt/warehouse/src/

# By default, Docker has special steps to avoid keeping APT caches in the layers, which
# is good, but in our case, we're going to mount a special cache volume (kept between
# builds), so we WANT the cache to persist.
RUN set -eux; \
rm -f /etc/apt/apt.conf.d/docker-clean; \
echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache;

# The list of C packages we need are almost never going to change, so installing
# them first, right off the bat lets us cache that and having node.js level
# dependency changes not trigger a reinstall.
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
set -eux; \
apt-get update; \
apt-get install --no-install-recommends -y \
libjpeg-dev \
nasm
RUN set -x \
&& apt-get update \
&& apt-get install --no-install-recommends -y \
libjpeg-dev nasm

# However, we do want to trigger a reinstall of our node.js dependencies anytime
# our package.json changes, so we'll ensure that we're copying that into our
Expand All @@ -32,10 +22,10 @@ COPY package.json package-lock.json .babelrc /opt/warehouse/src/
# Installing npm dependencies is done as a distinct step and *prior* to copying
# over our static files so that, you guessed it, we don't invalidate the cache
# of installed dependencies just because files have been modified.
RUN set -eux \
npm install -g npm@latest; \
npm install -g gulp-cli; \
npm ci;
RUN set -x \
&& npm install -g npm@latest \
&& npm install -g gulp-cli \
&& npm ci

# Actually copy over our static files, we only copy over the static files to
# save a small amount of space in our image and because we don't need them. We
Expand All @@ -49,124 +39,109 @@ COPY Gulpfile.babel.js /opt/warehouse/src/
RUN gulp dist


# ---------------------------------- BASE -----------------------------------
FROM python:3.8.2-slim-buster as base

# Setup some basic environment variables that are ~never going to change.
ENV PYTHONUNBUFFERED 1
ENV PYTHONPATH /opt/warehouse/src/
ENV PATH="/opt/warehouse/bin:${PATH}"

WORKDIR /opt/warehouse/src/

# By default, Docker has special steps to avoid keeping APT caches in the layers, which
# is good, but in our case, we're going to mount a special cache volume (kept between
# builds), so we WANT the cache to persist.
RUN set -eux \
rm -f /etc/apt/apt.conf.d/docker-clean; \
echo 'Binary::apt::APT::Keep-Downloaded-Packages "true";' > /etc/apt/apt.conf.d/keep-cache

# Install System level Warehouse requirements, this is done before everything
# else because these are rarely ever going to change.
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
set -eux; \
apt-get update; \
apt-get install --no-install-recommends -y \
libpq5 \
libxml2 \
libxslt1.1 \
libcurl4 \
;

# ---------------------------------- BUILD ----------------------------------

# Now we're going to build our actual application, but not the actual production
# image that it gets deployed into.
FROM base as build
FROM python:3.8.2-slim-buster as build

# Define whether we're building a production or a development image. This will
# generally be used to control whether or not we install our development and
# test dependencies.
ARG DEVEL=no

# To enable Ipython in the development environment set to yes (for using ipython
# as the warehouse shell interpreter,
# i.e. 'docker-compose run --rm web python -m warehouse shell --type=ipython')
ARG IPYTHON=no

# Install System level Warehouse build requirements, this is done before
# everything else because these are rarely ever going to change.
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
set -eux; \
apt-get update; \
apt-get install --no-install-recommends -y \
build-essential \
libcurl4-openssl-dev \
libffi-dev \
libpq-dev \
libssl-dev \
libxml2-dev \
libxslt-dev \
;
RUN set -x \
&& apt-get update \
&& apt-get install --no-install-recommends -y \
build-essential libffi-dev libxml2-dev libxslt-dev libpq-dev libcurl4-openssl-dev libssl-dev \
$(if [ "$DEVEL" = "yes" ]; then echo 'libjpeg-dev'; fi)

# We create an /opt directory with a virtual environment in it to store our
# application in.
RUN python3 -m venv /opt/warehouse
RUN set -x \
&& python3 -m venv /opt/warehouse


# Pip configuration (https://github.com/pypa/warehouse/pull/4584)
ENV PIP_NO_BINARY=hiredis PIP_DISABLE_PIP_VERSION_CHECK=1
# Now that we've created our virtual environment, we'll go ahead and update
# our $PATH to refer to it first.
ENV PATH="/opt/warehouse/bin:${PATH}"

# Next, we want to update pip, setuptools, and wheel inside of this virtual
# environment to ensure that we have the latest versions of them.
# TODO: We use --require-hashes in our requirements files, but not here, making
# the ones in the requirements files kind of a moot point. We should
# probably pin these too, and update them as we do anything else.
RUN pip --no-cache-dir --disable-pip-version-check install --upgrade pip setuptools wheel

# We copy this into the docker container prior to copying in the rest of our
# application so that we can skip installing requirements if the only thing
# that has changed is the Warehouse code itself.
COPY requirements /tmp/requirements

# Next, we want to update pip, setuptools, and wheel inside of this virtual
# environment to ensure that we have the latest versions of them.
RUN --mount=type=cache,target=/root/.cache \
pip install -r /tmp/requirements/pip.txt
# Install our development dependencies if we're building a development install
# otherwise this will do nothing.
RUN set -x \
&& if [ "$DEVEL" = "yes" ]; then pip --no-cache-dir --disable-pip-version-check install -r /tmp/requirements/dev.txt; fi

RUN set -x \
&& if [ "$DEVEL" = "yes" ] && [ "$IPYTHON" = "yes" ]; then pip --no-cache-dir --disable-pip-version-check install -r /tmp/requirements/ipython.txt; fi

# Install the Python level Warehouse requirements, this is done after copying
# the requirements but prior to copying Warehouse itself into the container so
# that code changes don't require triggering an entire install of all of
# Warehouse's dependencies.
RUN --mount=type=cache,target=/root/.cache \
set -eux; \
pip install -r /tmp/requirements/all-base.txt; \
find /opt/warehouse -name '*.pyc' -delete;
RUN set -x \
&& pip --no-cache-dir --disable-pip-version-check \
install --no-binary hiredis \
-r /tmp/requirements/deploy.txt \
-r /tmp/requirements/main.txt \
$(if [ "$DEVEL" = "yes" ]; then echo '-r /tmp/requirements/tests.txt -r /tmp/requirements/lint.txt'; fi) \
&& find /opt/warehouse -name '*.pyc' -delete

# ---------------------------------- DEV ----------------------------------

FROM build as dev

# To enable Ipython in the development environment set to yes (for using ipython
# as the warehouse shell interpreter,
# i.e. 'docker-compose run --rm web python -m warehouse shell --type=ipython')
ARG IPYTHON=no

# This is a work around because otherwise postgresql-client bombs out trying
# to create symlinks to these directories.
RUN set -eux; \
mkdir -p /usr/share/man/man1; \
mkdir -p /usr/share/man/man7

# Install System level Warehouse build requirements, this is done before
# everything else because these are rarely ever going to change.
RUN --mount=type=cache,target=/var/cache/apt,sharing=locked \
--mount=type=cache,target=/var/lib/apt,sharing=locked \
set -eux; \
apt-get update; \
apt-get install --no-install-recommends -y \
bash \
libjpeg-dev \
libjpeg62 \
postgresql-client \
;

# Install our development dependencies
RUN set -eux; \
pip install -r /tmp/requirements/dev.txt; \
if [ "$IPYTHON" = "yes" ]; then pip install -r /tmp/requirements/all-ipython.txt; fi;

RUN pip install -r /tmp/requirements/all-lint-test.txt;


# ---------------------------------- APP ----------------------------------
FROM base as app
# Now we're going to build our actual application image, which will eventually
# pull in the static files that were built above.
FROM python:3.8.2-slim-buster

# Setup some basic environment variables that are ~never going to change.
ENV PYTHONUNBUFFERED 1
ENV PYTHONPATH /opt/warehouse/src/
ENV PATH="/opt/warehouse/bin:${PATH}"

WORKDIR /opt/warehouse/src/

# Define whether we're building a production or a development image. This will
# generally be used to control whether or not we install our development and
# test dependencies.
ARG DEVEL=no

# This is a work around because otherwise postgresql-client bombs out trying
# to create symlinks to these directories.
RUN set -x \
&& mkdir -p /usr/share/man/man1 \
&& mkdir -p /usr/share/man/man7

# Install System level Warehouse requirements, this is done before everything
# else because these are rarely ever going to change.
RUN set -x \
&& apt-get update \
&& apt-get install --no-install-recommends -y \
libpq5 libxml2 libxslt1.1 libcurl4 \
$(if [ "$DEVEL" = "yes" ]; then echo 'bash libjpeg62 postgresql-client'; fi) \
&& apt-get clean \
&& rm -rf /var/lib/apt/lists/* /tmp/* /var/tmp/*

# Copy the directory into the container, this is done last so that changes to
# Warehouse itself require the least amount of layers being invalidated from
Expand Down
24 changes: 24 additions & 0 deletions Dockerfile.static
Original file line number Diff line number Diff line change
@@ -0,0 +1,24 @@
FROM node:14.4.0 as static

WORKDIR /opt/warehouse/src/

# The list of C packages we need are almost never going to change, so installing
# them first, right off the bat lets us cache that and having node.js level
# dependency changes not trigger a reinstall.
RUN set -x \
&& apt-get update \
&& apt-get install --no-install-recommends -y \
libjpeg-dev nasm

# However, we do want to trigger a reinstall of our node.js dependencies anytime
# our package.json changes, so we'll ensure that we're copying that into our
# static container prior to actually installing the npm dependencies.
COPY package.json package-lock.json .babelrc /opt/warehouse/src/

# Installing npm dependencies is done as a distinct step and *prior* to copying
# over our static files so that, you guessed it, we don't invalidate the cache
# of installed dependencies just because files have been modified.
RUN set -x \
&& npm install -g npm@latest \
&& npm install -g gulp-cli \
&& npm ci
2 changes: 1 addition & 1 deletion Makefile
Original file line number Diff line number Diff line change
Expand Up @@ -64,9 +64,9 @@ endif

.state/docker-build: Dockerfile package.json package-lock.json requirements/main.txt requirements/deploy.txt
# Build our docker containers for this project.
docker-compose build --force-rm static
docker-compose build --build-arg IPYTHON=$(IPYTHON) --force-rm web
docker-compose build --force-rm worker
docker-compose build --force-rm static

# Mark the state so we don't rebuild this needlessly.
mkdir -p .state
Expand Down
13 changes: 8 additions & 5 deletions docker-compose.yml
Original file line number Diff line number Diff line change
Expand Up @@ -61,8 +61,8 @@ services:
web:
build:
context: .
target: dev
args:
DEVEL: "yes"
IPYTHON: "no"
command: gunicorn --reload -b 0.0.0.0:8000 warehouse.wsgi:application
env_file: dev/environment
Expand All @@ -88,7 +88,8 @@ services:
- "80:8000"

files:
image: python:3.8.2-slim-buster
build:
context: .
working_dir: /var/opt/warehouse
command: python -m http.server 9001
volumes:
Expand All @@ -100,7 +101,8 @@ services:
worker:
build:
context: .
target: dev
args:
DEVEL: "yes"
command: hupper -m celery -A warehouse worker -B -S redbeat.RedBeatScheduler -l info
volumes:
- ./warehouse:/opt/warehouse/src/warehouse:z
Expand All @@ -112,7 +114,7 @@ services:
static:
build:
context: .
target: static
dockerfile: Dockerfile.static
command: bash -c "node --trace-warnings `which gulp` watch"
volumes:
- ./warehouse:/opt/warehouse/src/warehouse:z
Expand All @@ -128,7 +130,8 @@ services:
- "1080:80"

notdatadog:
image: python:3.8.2-slim-buster
build:
context: .
command: python /opt/warehouse/dev/notdatadog.py 0.0.0.0:8125
ports:
- "8125:8125/udp"
Expand Down
2 changes: 0 additions & 2 deletions requirements/all-base.txt

This file was deleted.

2 changes: 0 additions & 2 deletions requirements/all-ipython.txt

This file was deleted.

3 changes: 0 additions & 3 deletions requirements/all-lint-test.txt

This file was deleted.

1 change: 0 additions & 1 deletion requirements/deploy.in
Original file line number Diff line number Diff line change
@@ -1,2 +1 @@
-r pip.txt
gunicorn==20.1.0
20 changes: 5 additions & 15 deletions requirements/deploy.txt
Original file line number Diff line number Diff line change
@@ -1,5 +1,5 @@
#
# This file is autogenerated by pip-compile with python 3.8
# This file is autogenerated by pip-compile
# To update, run:
#
# pip-compile --allow-unsafe --generate-hashes --output-file=requirements/deploy.txt requirements/deploy.in
Expand All @@ -8,19 +8,9 @@ gunicorn==20.1.0 \
--hash=sha256:9dcc4547dbb1cb284accfb15ab5667a0e5d1881cc443e0677b4882a4067a807e \
--hash=sha256:e0a968b5ba15f8a328fdfd7ab1fcb5af4470c28aaf7e55df02a99bc13138e6e8
# via -r requirements/deploy.in
wheel==0.37.0 \
--hash=sha256:21014b2bd93c6d0034b6ba5d35e4eb284340e09d63c59aef6fc14b0f346146fd \
--hash=sha256:e2ef7239991699e3355d54f8e968a21bb940a1dbf34a4d226741e64462516fad
# via -r requirements/pip.txt

# The following packages are considered to be unsafe in a requirements file:
pip==21.2.4 \
--hash=sha256:0eb8a1516c3d138ae8689c0c1a60fde7143310832f9dc77e11d8a4bc62de193b \
--hash=sha256:fa9ebb85d3fd607617c0c44aca302b1b45d87f9c2a1649b46c26167ca4296323
# via -r requirements/pip.txt
setuptools==57.5.0 \
--hash=sha256:60d78588f15b048f86e35cdab73003d8b21dd45108ee61a6693881a427f22073 \
--hash=sha256:d9d3266d50f59c6967b9312844470babbdb26304fe740833a5f8d89829ba3a24
# via
# -r requirements/pip.txt
# gunicorn
setuptools==57.4.0 \
--hash=sha256:6bac238ffdf24e8806c61440e755192470352850f3419a52f26ffe0a1a64f465 \
--hash=sha256:a49230977aa6cfb9d933614d2f7b79036e9945c4cdd7583163f4e920b83418d6
# via gunicorn
1 change: 0 additions & 1 deletion requirements/docs.in
Original file line number Diff line number Diff line change
@@ -1,4 +1,3 @@
-r pip.txt
Sphinx
sphinx_rtd_theme
sphinxcontrib-httpdomain
Loading

0 comments on commit e31752d

Please sign in to comment.