
Commit 1f318f4

Merge pull request jupyter#1100 from romainx/hadolint
Docker linting -> Hadolint
2 parents: 76402a2 + 4d5bb32

File tree: 12 files changed (+175, -49 lines)

.hadolint.yaml (+3, new file)

@@ -0,0 +1,3 @@
+ignored:
+  - DL3006
+  - DL3008
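
This ignore list is expected to be picked up automatically when hadolint runs from the repository root, which is where the `make lint/%` target invokes it. A minimal sketch of a direct invocation, assuming hadolint was installed to `${HOME}/hadolint` by `make lint-install` (the stack name is just an example):

```bash
# Lint one stack's Dockerfile from the repository root;
# hadolint should resolve .hadolint.yaml from the working directory.
"${HOME}/hadolint" base-notebook/Dockerfile

# When running from another directory, point at the config explicitly.
"${HOME}/hadolint" --config .hadolint.yaml base-notebook/Dockerfile
```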

.travis.yml (+4, -2)

@@ -12,10 +12,11 @@ jobs:
       install:
         - pip install --upgrade pip
         - make dev-env
+        - make lint-install
       script:
         - set -e
         - if [ $(make n-docs-diff) -ne 0 ]; then make docs; fi;
-        - if [ $(make n-other-diff) -ne 0 ]; then make build-test-all DARGS="--build-arg TEST_ONLY_BUILD=1"; fi;
+        - if [ $(make n-other-diff) -ne 0 ]; then make lint-build-test-all DARGS="--build-arg TEST_ONLY_BUILD=1"; fi;
     - stage: push-tx
       install:
         - pip install --upgrade pip
@@ -26,10 +27,11 @@ jobs:
       install:
         - pip install --upgrade pip
         - make dev-env
+        - make lint-install
       script:
         - set -e
         - make docs
-        - make build-test-all DARGS="--build-arg TEST_ONLY_BUILD=1"
+        - make lint-build-test-all DARGS="--build-arg TEST_ONLY_BUILD=1"
 
 stages:
   - name: diff-test

Makefile (+20)

@@ -24,6 +24,9 @@ endif
 
 ALL_IMAGES:=$(ALL_STACKS)
 
+# Linter
+HADOLINT="${HOME}/hadolint"
+
 help:
 # http://marmelab.com/blog/2016/02/29/auto-documented-makefile.html
 	@echo "jupyter/docker-stacks"
@@ -73,6 +76,23 @@ dev/%: ## run a foreground container for a stack
 dev-env: ## install libraries required to build docs and run tests
 	pip install -r requirements-dev.txt
 
+lint/%: ARGS?=
+lint/%: ## lint the dockerfile(s) for a stack
+	@echo "Linting Dockerfiles in $(notdir $@)..."
+	@git ls-files --exclude='Dockerfile*' --ignored $(notdir $@) | grep -v ppc64 | xargs -L 1 $(HADOLINT) $(ARGS)
+	@echo "Linting done!"
+
+lint-all: $(foreach I,$(ALL_IMAGES),lint/$(I) ) ## lint all stacks
+
+lint-build-test-all: $(foreach I,$(ALL_IMAGES),lint/$(I) arch_patch/$(I) build/$(I) test/$(I) ) ## lint, build and test all stacks
+
+lint-install: ## install hadolint
+	@echo "Installing hadolint at $(HADOLINT) ..."
+	@curl -sL -o $(HADOLINT) "https://github.com/hadolint/hadolint/releases/download/v1.17.6/hadolint-$(shell uname -s)-$(shell uname -m)"
+	@chmod 700 $(HADOLINT)
+	@echo "Installation done!"
+	@$(HADOLINT) --version
+
 img-clean: img-rm-dang img-rm ## clean dangling and jupyter images
 
 img-list: ## list jupyter images
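
Taken together, the new targets give a short local workflow. A sketch of typical usage, based on the targets above and the Travis changes (stack name shown as an example):

```bash
# Install the pinned hadolint binary to ${HOME}/hadolint
make lint-install

# Lint a single stack, optionally forwarding extra hadolint arguments
make lint/base-notebook
make lint/base-notebook ARGS="--format codeclimate"

# Lint every stack, or lint + build + test everything as the CI job now does
make lint-all
make lint-build-test-all DARGS="--build-arg TEST_ONLY_BUILD=1"
```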

all-spark-notebook/Dockerfile (+9, -8)

@@ -30,22 +30,23 @@ RUN conda install --quiet --yes \
     'r-sparklyr=1.2*' \
     && \
     conda clean --all -f -y && \
-    fix-permissions $CONDA_DIR && \
-    fix-permissions /home/$NB_USER
+    fix-permissions "${CONDA_DIR}" && \
+    fix-permissions "/home/${NB_USER}"
 
 # Apache Toree kernel
+# hadolint ignore=DL3013
 RUN pip install --no-cache-dir \
     https://dist.apache.org/repos/dist/release/incubator/toree/0.3.0-incubating/toree-pip/toree-0.3.0.tar.gz \
     && \
     jupyter toree install --sys-prefix && \
-    rm -rf /home/$NB_USER/.local && \
-    fix-permissions $CONDA_DIR && \
-    fix-permissions /home/$NB_USER
+    rm -rf "/home/${NB_USER}/.local" && \
+    fix-permissions "${CONDA_DIR}" && \
+    fix-permissions "/home/${NB_USER}"
 
 # Spylon-kernel
 RUN conda install --quiet --yes 'spylon-kernel=0.4*' && \
     conda clean --all -f -y && \
     python -m spylon_kernel install --sys-prefix && \
-    rm -rf /home/$NB_USER/.local && \
-    fix-permissions $CONDA_DIR && \
-    fix-permissions /home/$NB_USER
+    rm -rf "/home/${NB_USER}/.local" && \
+    fix-permissions "${CONDA_DIR}" && \
+    fix-permissions "/home/${NB_USER}"

base-notebook/Dockerfile (+7, -2)

@@ -13,6 +13,9 @@ ARG NB_USER="jovyan"
 ARG NB_UID="1000"
 ARG NB_GID="100"
 
+# Fix DL4006
+SHELL ["/bin/bash", "-o", "pipefail", "-c"]
+
 USER root
 
 # Install all OS dependencies for notebook server that starts but lacks all
@@ -76,8 +79,8 @@ ENV MINICONDA_VERSION=4.8.2 \
     MINICONDA_MD5=87e77f097f6ebb5127c77662dfc3165e \
     CONDA_VERSION=4.8.2
 
-RUN cd /tmp && \
-    wget --quiet https://repo.continuum.io/miniconda/Miniconda3-py37_${MINICONDA_VERSION}-Linux-x86_64.sh && \
+WORKDIR /tmp
+RUN wget --quiet https://repo.continuum.io/miniconda/Miniconda3-py37_${MINICONDA_VERSION}-Linux-x86_64.sh && \
     echo "${MINICONDA_MD5} *Miniconda3-py37_${MINICONDA_VERSION}-Linux-x86_64.sh" | md5sum -c - && \
     /bin/bash Miniconda3-py37_${MINICONDA_VERSION}-Linux-x86_64.sh -f -b -p $CONDA_DIR && \
     rm Miniconda3-py37_${MINICONDA_VERSION}-Linux-x86_64.sh && \
@@ -137,3 +140,5 @@ RUN fix-permissions /etc/jupyter/
 
 # Switch back to jovyan to avoid accidental container runs as root
 USER $NB_UID
+
+WORKDIR $HOME
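
The `SHELL ["/bin/bash", "-o", "pipefail", "-c"]` instruction added here (and in the other Dockerfiles below) addresses hadolint's DL4006: without `pipefail`, a `RUN` that pipes a failing download into another command can still exit 0 and the build keeps going. A minimal bash sketch of the behaviour, with a made-up URL that is expected to fail:

```bash
#!/usr/bin/env bash
# Without pipefail, the pipeline's status is that of the last command,
# so a failed download piped into a consumer goes unnoticed.
curl -fsSL https://example.invalid/archive.tgz | wc -c
echo "without pipefail: $?"   # 0 -- wc succeeded, curl's failure is masked

# With pipefail (what the SHELL instruction turns on for every RUN),
# the failed curl makes the whole pipeline, and hence the RUN, fail.
set -o pipefail
curl -fsSL https://example.invalid/archive.tgz | wc -c
echo "with pipefail: $?"      # non-zero -- curl's failure propagates
```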

datascience-notebook/Dockerfile (+21, -14)

@@ -9,6 +9,9 @@ LABEL maintainer="Jupyter Project <[email protected]>"
 # be skipped to shorten build time.
 ARG TEST_ONLY_BUILD
 
+# Fix DL4006
+SHELL ["/bin/bash", "-o", "pipefail", "-c"]
+
 USER root
 
 # R pre-requisites
@@ -25,21 +28,23 @@ ENV JULIA_DEPOT_PATH=/opt/julia
 ENV JULIA_PKGDIR=/opt/julia
 ENV JULIA_VERSION=1.4.1
 
-RUN mkdir /opt/julia-${JULIA_VERSION} && \
-    cd /tmp && \
-    wget -q https://julialang-s3.julialang.org/bin/linux/x64/`echo ${JULIA_VERSION} | cut -d. -f 1,2`/julia-${JULIA_VERSION}-linux-x86_64.tar.gz && \
+WORKDIR /tmp
+
+# hadolint ignore=SC2046
+RUN mkdir "/opt/julia-${JULIA_VERSION}" && \
+    wget -q https://julialang-s3.julialang.org/bin/linux/x64/$(echo "${JULIA_VERSION}" | cut -d. -f 1,2)"/julia-${JULIA_VERSION}-linux-x86_64.tar.gz" && \
     echo "fd6d8cadaed678174c3caefb92207a3b0e8da9f926af6703fb4d1e4e4f50610a *julia-${JULIA_VERSION}-linux-x86_64.tar.gz" | sha256sum -c - && \
-    tar xzf julia-${JULIA_VERSION}-linux-x86_64.tar.gz -C /opt/julia-${JULIA_VERSION} --strip-components=1 && \
-    rm /tmp/julia-${JULIA_VERSION}-linux-x86_64.tar.gz
+    tar xzf "julia-${JULIA_VERSION}-linux-x86_64.tar.gz" -C "/opt/julia-${JULIA_VERSION}" --strip-components=1 && \
+    rm "/tmp/julia-${JULIA_VERSION}-linux-x86_64.tar.gz"
 RUN ln -fs /opt/julia-*/bin/julia /usr/local/bin/julia
 
 # Show Julia where conda libraries are \
 RUN mkdir /etc/julia && \
     echo "push!(Libdl.DL_LOAD_PATH, \"$CONDA_DIR/lib\")" >> /etc/julia/juliarc.jl && \
     # Create JULIA_PKGDIR \
-    mkdir $JULIA_PKGDIR && \
-    chown $NB_USER $JULIA_PKGDIR && \
-    fix-permissions $JULIA_PKGDIR
+    mkdir "${JULIA_PKGDIR}" && \
+    chown "${NB_USER}" "${JULIA_PKGDIR}" && \
+    fix-permissions "${JULIA_PKGDIR}"
 
 USER $NB_UID
 
@@ -66,8 +71,8 @@ RUN conda install --quiet --yes \
     'rpy2=3.1*' \
     && \
     conda clean --all -f -y && \
-    fix-permissions $CONDA_DIR && \
-    fix-permissions /home/$NB_USER
+    fix-permissions "${CONDA_DIR}" && \
+    fix-permissions "/home/${NB_USER}"
 
 # Add Julia packages. Only add HDF5 if this is not a test-only build since
 # it takes roughly half the entire build time of all of the images on Travis
@@ -80,7 +85,9 @@ RUN julia -e 'import Pkg; Pkg.update()' && \
     (test $TEST_ONLY_BUILD || julia -e 'import Pkg; Pkg.add("HDF5")') && \
     julia -e "using Pkg; pkg\"add IJulia\"; pkg\"precompile\"" && \
     # move kernelspec out of home \
-    mv $HOME/.local/share/jupyter/kernels/julia* $CONDA_DIR/share/jupyter/kernels/ && \
-    chmod -R go+rx $CONDA_DIR/share/jupyter && \
-    rm -rf $HOME/.local && \
-    fix-permissions $JULIA_PKGDIR $CONDA_DIR/share/jupyter
+    mv "${HOME}/.local/share/jupyter/kernels/julia"* "${CONDA_DIR}/share/jupyter/kernels/" && \
+    chmod -R go+rx "${CONDA_DIR}/share/jupyter" && \
+    rm -rf "${HOME}/.local" && \
+    fix-permissions "${JULIA_PKGDIR}" "${CONDA_DIR}/share/jupyter"
+
+WORKDIR $HOME
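
Most of the remaining changes in this file and the ones below are quoting fixes for ShellCheck's SC2086/SC2046 warnings surfaced by hadolint: an unquoted `$VAR` or `$(...)` is word-split and glob-expanded by the shell, which breaks as soon as a value contains spaces or glob characters. A small bash illustration with a made-up path:

```bash
#!/usr/bin/env bash
dir="/home/jovyan/my notebooks"   # hypothetical value containing a space

# Unquoted: the shell splits the value into two words, so mkdir
# creates "/home/jovyan/my" and "./notebooks" instead (SC2086).
mkdir -p $dir

# Quoted: one argument, one directory, as intended.
mkdir -p "${dir}"
```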

docs/contributing/lint.md (+78, new file)

@@ -0,0 +1,78 @@
+# Image Lint
+
+To comply with [Docker best practices][dbp], we are using the [Hadolint][hadolint] tool to analyse each `Dockerfile`.
+
+## Installation
+
+There is a specific `make` target to install the linter.
+By default `hadolint` will be installed in `${HOME}/hadolint`.
+
+```bash
+$ make lint-install
+
+# Installing hadolint at /Users/romain/hadolint ...
+# Installation done!
+# Haskell Dockerfile Linter v1.17.6-0-gc918759
+```
+
+## Lint
+
+### Per Stack
+
+The linter can be run per stack.
+
+```bash
+$ make lint/scipy-notebook
+
+# Linting Dockerfiles in scipy-notebook...
+# scipy-notebook/Dockerfile:4 DL3006 Always tag the version of an image explicitly
+# scipy-notebook/Dockerfile:11 DL3008 Pin versions in apt get install. Instead of `apt-get install <package>` use `apt-get install <package>=<version>`
+# scipy-notebook/Dockerfile:18 SC2086 Double quote to prevent globbing and word splitting.
+# scipy-notebook/Dockerfile:68 SC2086 Double quote to prevent globbing and word splitting.
+# scipy-notebook/Dockerfile:68 DL3003 Use WORKDIR to switch to a directory
+# scipy-notebook/Dockerfile:79 SC2086 Double quote to prevent globbing and word splitting.
+# make: *** [lint/scipy-notebook] Error 1
+```
+
+Optionally you can pass arguments to the linter.
+
+```bash
+# Use a different output format
+$ make lint/scipy-notebook ARGS="--format codeclimate"
+```
+
+### All the Stacks
+
+The linter can be run against all the stacks.
+
+```bash
+$ make lint-all
+```
+
+## Ignoring Rules
+
+Sometimes it is necessary to ignore [some rules][rules].
+The following rules are ignored by default for all images in the `.hadolint.yaml` file.
+
+- [`DL3006`][DL3006]: We use a specific policy to manage image tags.
+  - `base-notebook` `FROM` clause is fixed but based on an argument (`ARG`).
+  - Building downstream images from (`FROM`) the latest is done on purpose.
+- [`DL3008`][DL3008]: System packages are always updated (`apt-get`) to the latest version.
+
+For other rules, the preferred way is to flag the ignored rules directly in the `Dockerfile`.
+
+> It is also possible to ignore rules by using a special comment directly above the Dockerfile instruction you want to make an exception for. Ignore rule comments look like `# hadolint ignore=DL3001,SC1081`. For example:
+
+```dockerfile
+
+FROM ubuntu
+
+# hadolint ignore=DL3003,SC1035
+RUN cd /tmp && echo "hello!"
+```
+
+[hadolint]: https://github.com/hadolint/hadolint
+[dbp]: https://docs.docker.com/develop/develop-images/dockerfile_best-practices
+[rules]: https://github.com/hadolint/hadolint#rules
+[DL3006]: https://github.com/hadolint/hadolint/wiki/DL3006
+[DL3008]: https://github.com/hadolint/hadolint/wiki/DL3008

docs/index.rst (+1)

@@ -47,6 +47,7 @@ Table of Contents
    contributing/packages
    contributing/recipes
    contributing/translations
+   contributing/lint
    contributing/tests
    contributing/features
    contributing/stacks

pyspark-notebook/Dockerfile (+15, -7)

@@ -5,6 +5,9 @@ FROM $BASE_CONTAINER
 
 LABEL maintainer="Jupyter Project <[email protected]>"
 
+# Fix DL4006
+SHELL ["/bin/bash", "-o", "pipefail", "-c"]
+
 USER root
 
 # Spark dependencies
@@ -16,13 +19,16 @@ RUN apt-get -y update && \
     rm -rf /var/lib/apt/lists/*
 
 # Using the preferred mirror to download Spark
-RUN cd /tmp && \
-    wget -q $(wget -qO- https://www.apache.org/dyn/closer.lua/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz\?as_json | \
+WORKDIR /tmp
+# hadolint ignore=SC2046
+RUN wget -q $(wget -qO- https://www.apache.org/dyn/closer.lua/spark/spark-${APACHE_SPARK_VERSION}/spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz\?as_json | \
     python -c "import sys, json; content=json.load(sys.stdin); print(content['preferred']+content['path_info'])") && \
     echo "2426a20c548bdfc07df288cd1d18d1da6b3189d0b78dee76fa034c52a4e02895f0ad460720c526f163ba63a17efae4764c46a1cd8f9b04c60f9937a554db85d2 *spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" | sha512sum -c - && \
-    tar xzf spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz -C /usr/local --owner root --group root --no-same-owner && \
-    rm spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz
-RUN cd /usr/local && ln -s spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION} spark
+    tar xzf "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz" -C /usr/local --owner root --group root --no-same-owner && \
+    rm "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}.tgz"
+
+WORKDIR /usr/local
+RUN ln -s "spark-${APACHE_SPARK_VERSION}-bin-hadoop${HADOOP_VERSION}" spark
 
 # Configure Spark
 ENV SPARK_HOME=/usr/local/spark
@@ -35,5 +41,7 @@ USER $NB_UID
 # Install pyarrow
 RUN conda install --quiet -y 'pyarrow' && \
     conda clean --all -f -y && \
-    fix-permissions $CONDA_DIR && \
-    fix-permissions /home/$NB_USER
+    fix-permissions "${CONDA_DIR}" && \
+    fix-permissions "/home/${NB_USER}"
+
+WORKDIR $HOME

r-notebook/Dockerfile (+1, -1)

@@ -47,7 +47,7 @@ RUN conda install --quiet --yes \
     'unixodbc=2.3.*' \
     && \
     conda clean --all -f -y && \
-    fix-permissions $CONDA_DIR
+    fix-permissions "${CONDA_DIR}"
 
 # Install e1071 R package (dependency of the caret R package)
 RUN conda install --quiet --yes r-e1071

scipy-notebook/Dockerfile (+14, -13)

@@ -59,24 +59,25 @@ RUN conda install --quiet --yes \
     jupyter lab build -y && \
     jupyter lab clean -y && \
     npm cache clean --force && \
-    rm -rf /home/$NB_USER/.cache/yarn && \
-    rm -rf /home/$NB_USER/.node-gyp && \
-    fix-permissions $CONDA_DIR && \
-    fix-permissions /home/$NB_USER
+    rm -rf "/home/${NB_USER}/.cache/yarn" && \
+    rm -rf "/home/${NB_USER}/.node-gyp" && \
+    fix-permissions "${CONDA_DIR}" && \
+    fix-permissions "/home/${NB_USER}"
 
 # Install facets which does not have a pip or conda package at the moment
-RUN cd /tmp && \
-    git clone https://github.com/PAIR-code/facets.git && \
-    cd facets && \
-    jupyter nbextension install facets-dist/ --sys-prefix && \
-    cd && \
+WORKDIR /tmp
+RUN git clone https://github.com/PAIR-code/facets.git && \
+    jupyter nbextension install facets/facets-dist/ --sys-prefix && \
     rm -rf /tmp/facets && \
-    fix-permissions $CONDA_DIR && \
-    fix-permissions /home/$NB_USER
+    fix-permissions "${CONDA_DIR}" && \
+    fix-permissions "/home/${NB_USER}"
 
 # Import matplotlib the first time to build the font cache.
-ENV XDG_CACHE_HOME /home/$NB_USER/.cache/
+ENV XDG_CACHE_HOME="/home/${NB_USER}/.cache/"
+
 RUN MPLBACKEND=Agg python -c "import matplotlib.pyplot" && \
-    fix-permissions /home/$NB_USER
+    fix-permissions "/home/${NB_USER}"
 
 USER $NB_UID
+
+WORKDIR $HOME

tensorflow-notebook/Dockerfile (+2, -2)

@@ -8,5 +8,5 @@ LABEL maintainer="Jupyter Project <[email protected]>"
 # Install Tensorflow
 RUN pip install --quiet --no-cache-dir \
     'tensorflow==2.2.0' && \
-    fix-permissions $CONDA_DIR && \
-    fix-permissions /home/$NB_USER
+    fix-permissions "${CONDA_DIR}" && \
+    fix-permissions "/home/${NB_USER}"
