Compare commits
131 Commits
| SHA1 |
|---|
| d20bde221b |
| 91ab080476 |
| 3fb82b0fde |
| 730ee6952a |
| 76d2578b77 |
| ca9bf8ce5c |
| de7e563ad2 |
| d102ba94e5 |
| cf6d74a2be |
| 80cea505c4 |
| c0c72219a9 |
| 6bf15d6577 |
| 2fdcf19223 |
| c024dcd8d9 |
| 0570d264ed |
| f0a9a9fb04 |
| bf0a9190c8 |
| 1add81a1ce |
| 8bfe96968d |
| aab53102db |
| 1265766b3a |
| 3765d41b5c |
| 939d2992f1 |
| a92ea9c386 |
| d880f69836 |
| ea989a78c4 |
| a615d551a5 |
| 6995122edd |
| ad6c06d075 |
| fe3c9d22d5 |
| 7d54465eb0 |
| 1505156bfa |
| 6ddcd3f716 |
| 2f2fb40c56 |
| c3933ab7ae |
| 5c1503aff5 |
| f34f2ce857 |
| 8d5724e70e |
| 23c4420630 |
| 09eae003cf |
| 4ba50052a5 |
| 9c5393dfa0 |
| 9c76d246f6 |
| 096c6b5848 |
| 8b51f53bfe |
| 8ab895b38a |
| 3a0b1b197c |
| 3d8fed70e7 |
| c2bca952a4 |
| 0c5468ba29 |
| 18b1f446ab |
| f23ef9b293 |
| e74f8c12e4 |
| 4dffbf031c |
| 49ed8a0b2c |
| 05977b507d |
| 7a41d55e61 |
| e7174adaf1 |
| f38ac6ff0b |
| 17f87be607 |
| 658df9e905 |
| aa31e6baf6 |
| a6e70b653d |
| e061deee3e |
| 79ecb74140 |
| ce2b04e34e |
| 1111f81fbf |
| f057c22649 |
| f4742240c2 |
| 6dcb04d9d6 |
| dbd8611288 |
| b135d68b6f |
| 6987b36981 |
| cec9ffb36b |
| 47298e7f37 |
| c7519e31a2 |
| 226d7239e5 |
| fa044d0129 |
| 7418b6adee |
| 4eb10fd63a |
| cee3e47550 |
| 7459a75cac |
| a2b28d03b8 |
| f226e090fe |
| 0cc62575a7 |
| e5a1a26a4a |
| 89717949c4 |
| fd48454649 |
| 8fe0d7231e |
| 8607e26f5e |
| 9cf80692d2 |
| 798a27075c |
| 4b6c41f950 |
| eebb82bb3f |
| 09131ad78f |
| 7c402d329a |
| a80868209f |
| 113da00d0a |
| 3d76736421 |
| 20b130a666 |
| 5eab0a9730 |
| 23f7feff20 |
| c7adfe432a |
| 4f85381d8c |
| d8cf7df70b |
| c08698c11a |
| 8ce0aafcd3 |
| 41289bdd58 |
| de74f56e87 |
| bbb770b6c0 |
| fea14ae5a4 |
| 7da8fa586b |
| a4c4de3cf4 |
| fded8bdcc4 |
| 49d17bded0 |
| fea57446a5 |
| 41fc498166 |
| 3e76bbd51f |
| 7cb09b78db |
| 062f94d0f7 |
| cb463d01d2 |
| 1322db7ec3 |
| 07bc55786b |
| 397ec03285 |
| 138e061c17 |
| b72da80f13 |
| 3de08b138e |
| 040d9fcee6 |
| f7f6949c53 |
| f950671a1c |
| c1fb17a645 |
65  .circleci/config.yml  Normal file
@@ -0,0 +1,65 @@
version: 2

.job_template: &job_template
  machine:
    enabled: true
  steps:
    - checkout
    - run:
        command: ./circle-test.sh
    - persist_to_workspace:
        root: .
        paths: [ 'ci-workspace' ]

jobs:
  amd64:
    <<: *job_template
  arm64:
    <<: *job_template
  armhf:
    <<: *job_template
  armel:
    <<: *job_template
  deploy:
    docker:
      - image: circleci/python:latest
    steps:
      - setup_remote_docker:
          version: 18.06.0-ce
      - checkout
      - attach_workspace:
          at: .
      - run:
          command: ./circle-deploy.sh



workflows:
  version: 2
  build:
    jobs:
      - amd64:
          filters:
            tags:
              only: /^v.*/
      - arm64:
          filters:
            tags:
              only: /^v.*/
      - armhf:
          filters:
            tags:
              only: /^v.*/
      - armel:
          filters:
            tags:
              only: /^v.*/
      - deploy:
          requires:
            - amd64
            - arm64
            - armhf
            - armel
          filters:
            tags:
              only: /^v.*/
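The workflow above fans one machine job out per architecture and gates `deploy` on all four passing. As a sketch only (not part of this change), edits to this file can be checked locally with the CircleCI CLI before pushing, assuming the `circleci` CLI is installed:

```bash
# Optional local sanity check of the new CircleCI config.
circleci config validate .circleci/config.yml
```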
@@ -1 +1,5 @@
**/*.sw*
.tox
.git
**/__pycache__
.pipenv
2  .gitignore  vendored
@@ -3,9 +3,11 @@
.cache
__pycache__
.tox
.pipenv
.eggs
UNKNOWN.egg-info
.env
ci-workspace

# WIP/test stuff
doco.yml
24  .travis.yml
@@ -1,24 +0,0 @@
sudo: required
services:
  - docker
language: python
env:
  global:
    - QEMU_VER=v2.9.1
  matrix:
    - ARCH=amd64
    - ARCH=armhf
    - ARCH=aarch64
python:
  - "2.7"
install:
  - pip install -r requirements.txt
script:
  # prepare qemu
  - docker run --rm --privileged multiarch/qemu-user-static:register --reset
  # generate and build dockerfile
  - ./Dockerfile.py --arch=${ARCH} -v
  - docker images
  # run docker build & tests
  # 2 parallel max b/c race condition with docker fixture (I think?)
  - py.test -vv -n 2 -k "${ARCH}" ./test/
@@ -1,33 +1,34 @@
#!/usr/bin/env python
#!/usr/bin/env python3
""" Dockerfile.py - generates and build dockerfiles

Usage:
    Dockerfile.py [--arch=<arch> ...] [--skip=<arch> ...] [-v] [-t] [--no-build | --no-generate] [--no-cache]
    Dockerfile.py [--hub_tag=<tag>] [--arch=<arch> ...] [-v] [-t] [--no-build | --no-generate] [--no-cache]

Options:
    --no-build           Skip building the docker images
    --no-cache           Build without using any cache data
    --no-generate        Skip generating Dockerfiles from template
    --arch=<arch>        What Architecture(s) to build [default: amd64 armel armhf aarch64]
    --skip=<arch>        What Architectures(s) to skip [default: None]
    --hub_tag=<tag>      What the Docker Hub Image should be tagged as [default: None]
    --arch=<arch>        What Architecture(s) to build [default: amd64 armel armhf arm64]
    -v                   Print docker's command output [default: False]
    -t                   Print docker's build time [default: False]

Examples:
"""

from docopt import docopt

from jinja2 import Environment, FileSystemLoader
from docopt import docopt
import os
import testinfra
import subprocess
import sys

THIS_DIR = os.path.dirname(os.path.abspath(__file__))

base_vars = {
    'name': 'pihole/pihole',
    'maintainer' : 'adam@diginc.us',
    's6_version' : 'v1.21.7.0',
    's6_version' : 'v1.22.1.0',
}

os_base_vars = {
@@ -45,41 +46,43 @@ images = {
    __version__: [
        {
            'base': 'pihole/debian-base:latest',
            'arch': 'amd64'
            'arch': 'amd64',
            's6arch': 'amd64',
        },
        {
            'base': 'multiarch/debian-debootstrap:armel-stretch-slim',
            'arch': 'armel'
            'arch': 'armel',
            's6arch': 'arm',
        },
        {
            'base': 'multiarch/debian-debootstrap:armhf-stretch-slim',
            'arch': 'armhf'
            'arch': 'arm',
            's6arch' : 'arm',
        },
        {
            'base': 'multiarch/debian-debootstrap:arm64-stretch-slim',
            'arch': 'aarch64'
            'arch': 'arm64',
            's6arch' : 'aarch64',
        }
    ]
}

def generate_dockerfiles(args):
    if args['--no-generate']:
        print " ::: Skipping Dockerfile generation"
        print(" ::: Skipping Dockerfile generation")
        return

    for version, archs in images.iteritems():
    for version, archs in images.items():
        for image in archs:
            if image['arch'] not in args['--arch'] or image['arch'] in args['--skip']:
                return
            s6arch = image['arch']
            if image['arch'] == 'armel':
                s6arch = 'arm'
            if image['arch'] not in args['--arch']:
                continue
            s6arch = image['s6arch'] if image['s6arch'] else image['arch']
            merged_data = dict(
                { 'version': version }.items() +
                base_vars.items() +
                os_base_vars.items() +
                image.items() +
                { 's6arch': s6arch }.items()
                list({ 'version': version }.items()) +
                list(base_vars.items()) +
                list(os_base_vars.items()) +
                list(image.items()) +
                list({ 's6arch': s6arch }.items())
            )
            j2_env = Environment(loader=FileSystemLoader(THIS_DIR),
                                 trim_blocks=True)
@@ -92,25 +95,32 @@ def generate_dockerfiles(args):

def build_dockerfiles(args):
    if args['--no-build']:
        print " ::: Skipping Dockerfile building"
        print(" ::: Skipping Dockerfile building")
        return

    for arch in args['--arch']:
        # TODO: include from external .py that can be shared with Dockerfile.py / Tests / deploy scripts '''
        if arch == 'armel':
            print "Skipping armel, incompatible upstream binaries/broken"
            continue
        build('pihole', arch, args)


def build(docker_repo, arch, args):
    run_local = testinfra.get_backend(
        "local://"
    ).get_module("Command").run
def run_and_stream_command_output(command, args):
    print("Running", command)
    build_result = subprocess.Popen(command.split(), stdout=subprocess.PIPE, stderr=subprocess.STDOUT,
                                    bufsize=1, universal_newlines=True)
    if args['-v']:
        while build_result.poll() is None:
            for line in build_result.stdout:
                print(line, end='')
    build_result.wait()
    if build_result.returncode != 0:
        print(" ::: Error running".format(command))
        print(build_result.stderr)


def build(docker_repo, arch, args):
    dockerfile = 'Dockerfile_{}'.format(arch)
    repo_tag = '{}:{}_{}'.format(docker_repo, __version__, arch)
    cached_image = '{}/{}'.format('pihole', repo_tag)
    print(" ::: Building {}".format(repo_tag))
    time=''
    if args['-t']:
        time='time '
@@ -119,22 +129,21 @@ def build(docker_repo, arch, args):
        no_cache = '--no-cache'
    build_command = '{time}docker build {no_cache} --pull --cache-from="{cache},{create_tag}" -f {dockerfile} -t {create_tag} .'\
                    .format(time=time, no_cache=no_cache, cache=cached_image, dockerfile=dockerfile, create_tag=repo_tag)
    print " ::: Building {} into {}".format(dockerfile, repo_tag)
    print(" ::: Building {} into {}".format(dockerfile, repo_tag))
    run_and_stream_command_output(build_command, args)
    if args['-v']:
        print build_command, '\n'
        build_result = run_local(build_command)
    if args['-v']:
        print build_result.stdout
        print build_result.stderr
    if build_result.rc != 0:
        print " ::: Building {} encountered an error".format(dockerfile)
        print build_result.stderr
    assert build_result.rc == 0
        print(build_command, '\n')
    if args['--hub_tag']:
        hub_tag_command = "{time}docker tag {create_tag} {hub_tag}"\
                          .format(time=time, create_tag=repo_tag, hub_tag=args['--hub_tag'])
        print(" ::: Tagging {} into {}".format(repo_tag, args['--hub_tag']))
        run_and_stream_command_output(hub_tag_command, args)


if __name__ == '__main__':
    args = docopt(__doc__, version='Dockerfile 1.0')
    # print args
    args = docopt(__doc__, version='Dockerfile 1.1')
    if args['-v']:
        print(args)

    generate_dockerfiles(args)
    build_dockerfiles(args)
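For reference, the rewritten script is what `Dockerfile.sh` drives in CI; a hand-run equivalent might look like the sketch below. The value passed to `--hub_tag` is only an illustration, not a tag used by the pipeline:

```bash
# Render the per-arch Dockerfile and build it, tagging the result for a (hypothetical) hub push.
./Dockerfile.py --arch=amd64 -v --hub_tag=pihole/pihole:v4.4_amd64
```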
8  Dockerfile.sh  Executable file
@@ -0,0 +1,8 @@
#!/usr/bin/env sh
# alpine sh only

set -eux
./Dockerfile.py -v --arch="${ARCH}" --hub_tag="${ARCH_IMAGE}"
# TODO: Add junitxml output and have circleci consume it
# 2 parallel max b/c race condition with docker fixture (I think?)
py.test -vv -n 2 -k "${ARCH}" ./test/
@@ -1,6 +1,8 @@
FROM {{ pihole.base }}

ENV ARCH {{ pihole.arch }}
ENV S6OVERLAY_RELEASE https://github.com/just-containers/s6-overlay/releases/download/{{ pihole.s6_version }}/s6-overlay-{{ pihole.s6arch }}.tar.gz

COPY install.sh /usr/local/bin/install.sh
COPY VERSION /etc/docker-pi-hole-version
ENV PIHOLE_INSTALL /root/ph_install.sh
@@ -36,13 +38,12 @@ ENV FTL_CMD no-daemon
ENV DNSMASQ_USER root

ENV VERSION {{ pihole.version }}
ENV ARCH {{ pihole.arch }}
ENV PATH /opt/pihole:${PATH}

LABEL image="{{ pihole.name }}:{{ pihole.version }}_{{ pihole.arch }}"
LABEL maintainer="{{ pihole.maintainer }}"
LABEL url="https://www.github.com/pi-hole/docker-pi-hole"

HEALTHCHECK CMD dig @127.0.0.1 pi.hole || exit 1
HEALTHCHECK CMD dig +norecurse +retry=0 @127.0.0.1 pi.hole || exit 1

SHELL ["/bin/bash", "-c"]
@@ -1,6 +1,8 @@
FROM pihole/debian-base:latest

ENV S6OVERLAY_RELEASE https://github.com/just-containers/s6-overlay/releases/download/v1.21.7.0/s6-overlay-amd64.tar.gz
ENV ARCH amd64
ENV S6OVERLAY_RELEASE https://github.com/just-containers/s6-overlay/releases/download/v1.22.1.0/s6-overlay-amd64.tar.gz

COPY install.sh /usr/local/bin/install.sh
COPY VERSION /etc/docker-pi-hole-version
ENV PIHOLE_INSTALL /root/ph_install.sh
@@ -35,11 +37,10 @@ ENV ServerIP 0.0.0.0
ENV FTL_CMD no-daemon
ENV DNSMASQ_USER root

ENV VERSION v4.3
ENV ARCH amd64
ENV VERSION v4.4
ENV PATH /opt/pihole:${PATH}

LABEL image="pihole/pihole:v4.3_amd64"
LABEL image="pihole/pihole:v4.4_amd64"
LABEL maintainer="adam@diginc.us"
LABEL url="https://www.github.com/pi-hole/docker-pi-hole"
@@ -1,6 +1,8 @@
FROM multiarch/debian-debootstrap:arm64-stretch-slim

ENV S6OVERLAY_RELEASE https://github.com/just-containers/s6-overlay/releases/download/v1.21.7.0/s6-overlay-aarch64.tar.gz
ENV ARCH arm64
ENV S6OVERLAY_RELEASE https://github.com/just-containers/s6-overlay/releases/download/v1.22.1.0/s6-overlay-aarch64.tar.gz

COPY install.sh /usr/local/bin/install.sh
COPY VERSION /etc/docker-pi-hole-version
ENV PIHOLE_INSTALL /root/ph_install.sh
@@ -35,11 +37,10 @@ ENV ServerIP 0.0.0.0
ENV FTL_CMD no-daemon
ENV DNSMASQ_USER root

ENV VERSION v4.3
ENV ARCH aarch64
ENV VERSION v4.4
ENV PATH /opt/pihole:${PATH}

LABEL image="pihole/pihole:v4.3_aarch64"
LABEL image="pihole/pihole:v4.4_arm64"
LABEL maintainer="adam@diginc.us"
LABEL url="https://www.github.com/pi-hole/docker-pi-hole"
@@ -1,6 +1,8 @@
FROM multiarch/debian-debootstrap:armel-stretch-slim

ENV S6OVERLAY_RELEASE https://github.com/just-containers/s6-overlay/releases/download/v1.21.7.0/s6-overlay-arm.tar.gz
ENV ARCH armel
ENV S6OVERLAY_RELEASE https://github.com/just-containers/s6-overlay/releases/download/v1.22.1.0/s6-overlay-arm.tar.gz

COPY install.sh /usr/local/bin/install.sh
COPY VERSION /etc/docker-pi-hole-version
ENV PIHOLE_INSTALL /root/ph_install.sh
@@ -35,11 +37,10 @@ ENV ServerIP 0.0.0.0
ENV FTL_CMD no-daemon
ENV DNSMASQ_USER root

ENV VERSION v4.3
ENV ARCH armel
ENV VERSION v4.4
ENV PATH /opt/pihole:${PATH}

LABEL image="pihole/pihole:v4.3_armel"
LABEL image="pihole/pihole:v4.4_armel"
LABEL maintainer="adam@diginc.us"
LABEL url="https://www.github.com/pi-hole/docker-pi-hole"
@@ -1,6 +1,8 @@
FROM multiarch/debian-debootstrap:armhf-stretch-slim

ENV ARCH armhf
ENV S6OVERLAY_RELEASE https://github.com/just-containers/s6-overlay/releases/download/v1.21.7.0/s6-overlay-armhf.tar.gz

COPY install.sh /usr/local/bin/install.sh
COPY VERSION /etc/docker-pi-hole-version
ENV PIHOLE_INSTALL /root/ph_install.sh
@@ -35,11 +37,10 @@ ENV ServerIP 0.0.0.0
ENV FTL_CMD no-daemon
ENV DNSMASQ_USER root

ENV VERSION v4.3
ENV ARCH armhf
ENV VERSION v4.4
ENV PATH /opt/pihole:${PATH}

LABEL image="pihole/pihole:v4.3_armhf"
LABEL image="pihole/pihole:v4.4_armhf"
LABEL maintainer="adam@diginc.us"
LABEL url="https://www.github.com/pi-hole/docker-pi-hole"
23  Dockerfile_build  Normal file
@@ -0,0 +1,23 @@
FROM docker:latest

# Based on https://github.com/Ilhicas/alpine-pipenv
ARG packages
RUN apk --update add python3 python3-dev curl gcc make \
    musl-dev libffi-dev openssl-dev ${packages} \
    && rm -rf /var/cache/apk/* \
    && pip3 install -U pip pipenv


# -v "$(pwd):/$(pwd)" -w "$(pwd)" to prevent nested docker path confusion
COPY ./Dockerfile.sh /usr/local/bin/
COPY Pipfile* /root/
WORKDIR /root

RUN pipenv install --system \
    && sed -i 's|/bin/sh|/bin/bash|g' /usr/lib/python3.8/site-packages/testinfra/backend/docker.py


RUN echo "set -ex && Dockerfile.sh && \$@" > /usr/local/bin/entrypoint.sh
RUN chmod +x /usr/local/bin/entrypoint.sh
ENTRYPOINT entrypoint.sh
CMD Dockerfile.sh
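This image packages the pipenv/pytest toolchain so the templating and tests run inside a container; `circle-test.sh` later in this diff builds and runs it in CI. A rough local equivalent is sketched here, where the `ARCH` and `ARCH_IMAGE` values are placeholders that the CI scripts normally derive:

```bash
# Build the helper image and run the templating/tests inside it (values are illustrative).
docker build -t image_pipenv -f Dockerfile_build .
docker run --rm \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -v "$(pwd):/$(pwd)" -w "$(pwd)" \
    -e ARCH=amd64 -e ARCH_IMAGE=pihole/pihole:v4.4_amd64 \
    image_pipenv
```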
63  Pipfile  Normal file
@@ -0,0 +1,63 @@
[[source]]
name = "pypi"
url = "https://pypi.org/simple"
verify_ssl = true

[dev-packages]

[packages]
apipkg = "==1.5"
atomicwrites = "==1.3.0"
attrs = "==19.3.0"
bcrypt = "==3.1.7"
cached-property = "==1.5.1"
certifi = "==2019.11.28"
cffi = "==1.13.2"
chardet = "==3.0.4"
configparser = "==4.0.2"
contextlib2 = "==0.6.0.post1"
coverage = "==5.0.1"
cryptography = "==2.8"
docker = "==4.1.0"
dockerpty = "==0.4.1"
docopt = "==0.6.2"
enum34 = "==1.1.6"
execnet = "==1.7.1"
filelock = "==3.0.12"
funcsigs = "==1.0.2"
idna = "==2.8"
importlib-metadata = "==1.3.0"
ipaddress = "==1.0.23"
jsonschema = "==3.2.0"
more-itertools = "==5.0.0"
pathlib2 = "==2.3.5"
pluggy = "==0.13.1"
py = "==1.8.1"
pycparser = "==2.19"
pyparsing = "==2.4.6"
pyrsistent = "==0.15.6"
pytest = "==4.6.8"
pytest-cov = "==2.8.1"
pytest-forked = "==1.1.3"
pytest-xdist = "==1.31.0"
requests = "==2.22.0"
scandir = "==1.10.0"
six = "==1.13.0"
subprocess32 = "==3.5.4"
testinfra = "==3.3.0"
texttable = "==1.6.2"
toml = "==0.10.0"
tox = "==3.14.3"
urllib3 = "==1.25.7"
virtualenv = "==16.7.9"
wcwidth = "==0.1.7"
zipp = "==0.6.0"
"backports.shutil_get_terminal_size" = "==1.0.0"
"backports.ssl_match_hostname" = "==3.7.0.1"
Jinja2 = "==2.10.3"
MarkupSafe = "==1.1.1"
PyYAML = "==5.2"
websocket_client = "==0.57.0"

[requires]
python_version = "3.8"
581  Pipfile.lock  generated  Normal file
@@ -0,0 +1,581 @@
|
||||
{
|
||||
"_meta": {
|
||||
"hash": {
|
||||
"sha256": "ee7705112b315cad899e08bd6eac8f47e9a200a0d47a1920cc192995b79f8673"
|
||||
},
|
||||
"pipfile-spec": 6,
|
||||
"requires": {
|
||||
"python_version": "3.8"
|
||||
},
|
||||
"sources": [
|
||||
{
|
||||
"name": "pypi",
|
||||
"url": "https://pypi.org/simple",
|
||||
"verify_ssl": true
|
||||
}
|
||||
]
|
||||
},
|
||||
"default": {
|
||||
"apipkg": {
|
||||
"hashes": [
|
||||
"sha256:37228cda29411948b422fae072f57e31d3396d2ee1c9783775980ee9c9990af6",
|
||||
"sha256:58587dd4dc3daefad0487f6d9ae32b4542b185e1c36db6993290e7c41ca2b47c"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.5"
|
||||
},
|
||||
"atomicwrites": {
|
||||
"hashes": [
|
||||
"sha256:03472c30eb2c5d1ba9227e4c2ca66ab8287fbfbbda3888aa93dc2e28fc6811b4",
|
||||
"sha256:75a9445bac02d8d058d5e1fe689654ba5a6556a1dfd8ce6ec55a0ed79866cfa6"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.3.0"
|
||||
},
|
||||
"attrs": {
|
||||
"hashes": [
|
||||
"sha256:08a96c641c3a74e44eb59afb61a24f2cb9f4d7188748e76ba4bb5edfa3cb7d1c",
|
||||
"sha256:f7b7ce16570fe9965acd6d30101a28f62fb4a7f9e926b3bbc9b61f8b04247e72"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==19.3.0"
|
||||
},
|
||||
"backports.shutil-get-terminal-size": {
|
||||
"hashes": [
|
||||
"sha256:0975ba55054c15e346944b38956a4c9cbee9009391e41b86c68990effb8c1f64",
|
||||
"sha256:713e7a8228ae80341c70586d1cc0a8caa5207346927e23d09dcbcaf18eadec80"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.0.0"
|
||||
},
|
||||
"backports.ssl-match-hostname": {
|
||||
"hashes": [
|
||||
"sha256:bb82e60f9fbf4c080eabd957c39f0641f0fc247d9a16e31e26d594d8f42b9fd2"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.7.0.1"
|
||||
},
|
||||
"bcrypt": {
|
||||
"hashes": [
|
||||
"sha256:0258f143f3de96b7c14f762c770f5fc56ccd72f8a1857a451c1cd9a655d9ac89",
|
||||
"sha256:0b0069c752ec14172c5f78208f1863d7ad6755a6fae6fe76ec2c80d13be41e42",
|
||||
"sha256:19a4b72a6ae5bb467fea018b825f0a7d917789bcfe893e53f15c92805d187294",
|
||||
"sha256:5432dd7b34107ae8ed6c10a71b4397f1c853bd39a4d6ffa7e35f40584cffd161",
|
||||
"sha256:6305557019906466fc42dbc53b46da004e72fd7a551c044a827e572c82191752",
|
||||
"sha256:69361315039878c0680be456640f8705d76cb4a3a3fe1e057e0f261b74be4b31",
|
||||
"sha256:6fe49a60b25b584e2f4ef175b29d3a83ba63b3a4df1b4c0605b826668d1b6be5",
|
||||
"sha256:74a015102e877d0ccd02cdeaa18b32aa7273746914a6c5d0456dd442cb65b99c",
|
||||
"sha256:763669a367869786bb4c8fcf731f4175775a5b43f070f50f46f0b59da45375d0",
|
||||
"sha256:8b10acde4e1919d6015e1df86d4c217d3b5b01bb7744c36113ea43d529e1c3de",
|
||||
"sha256:9fe92406c857409b70a38729dbdf6578caf9228de0aef5bc44f859ffe971a39e",
|
||||
"sha256:a190f2a5dbbdbff4b74e3103cef44344bc30e61255beb27310e2aec407766052",
|
||||
"sha256:a595c12c618119255c90deb4b046e1ca3bcfad64667c43d1166f2b04bc72db09",
|
||||
"sha256:c9457fa5c121e94a58d6505cadca8bed1c64444b83b3204928a866ca2e599105",
|
||||
"sha256:cb93f6b2ab0f6853550b74e051d297c27a638719753eb9ff66d1e4072be67133",
|
||||
"sha256:ce4e4f0deb51d38b1611a27f330426154f2980e66582dc5f438aad38b5f24fc1",
|
||||
"sha256:d7bdc26475679dd073ba0ed2766445bb5b20ca4793ca0db32b399dccc6bc84b7",
|
||||
"sha256:ff032765bb8716d9387fd5376d987a937254b0619eff0972779515b5c98820bc"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.1.7"
|
||||
},
|
||||
"cached-property": {
|
||||
"hashes": [
|
||||
"sha256:3a026f1a54135677e7da5ce819b0c690f156f37976f3e30c5430740725203d7f",
|
||||
"sha256:9217a59f14a5682da7c4b8829deadbfc194ac22e9908ccf7c8820234e80a1504"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.5.1"
|
||||
},
|
||||
"certifi": {
|
||||
"hashes": [
|
||||
"sha256:017c25db2a153ce562900032d5bc68e9f191e44e9a0f762f373977de9df1fbb3",
|
||||
"sha256:25b64c7da4cd7479594d035c08c2d809eb4aab3a26e5a990ea98cc450c320f1f"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2019.11.28"
|
||||
},
|
||||
"cffi": {
|
||||
"hashes": [
|
||||
"sha256:0b49274afc941c626b605fb59b59c3485c17dc776dc3cc7cc14aca74cc19cc42",
|
||||
"sha256:0e3ea92942cb1168e38c05c1d56b0527ce31f1a370f6117f1d490b8dcd6b3a04",
|
||||
"sha256:135f69aecbf4517d5b3d6429207b2dff49c876be724ac0c8bf8e1ea99df3d7e5",
|
||||
"sha256:19db0cdd6e516f13329cba4903368bff9bb5a9331d3410b1b448daaadc495e54",
|
||||
"sha256:2781e9ad0e9d47173c0093321bb5435a9dfae0ed6a762aabafa13108f5f7b2ba",
|
||||
"sha256:291f7c42e21d72144bb1c1b2e825ec60f46d0a7468f5346841860454c7aa8f57",
|
||||
"sha256:2c5e309ec482556397cb21ede0350c5e82f0eb2621de04b2633588d118da4396",
|
||||
"sha256:2e9c80a8c3344a92cb04661115898a9129c074f7ab82011ef4b612f645939f12",
|
||||
"sha256:32a262e2b90ffcfdd97c7a5e24a6012a43c61f1f5a57789ad80af1d26c6acd97",
|
||||
"sha256:3c9fff570f13480b201e9ab69453108f6d98244a7f495e91b6c654a47486ba43",
|
||||
"sha256:415bdc7ca8c1c634a6d7163d43fb0ea885a07e9618a64bda407e04b04333b7db",
|
||||
"sha256:42194f54c11abc8583417a7cf4eaff544ce0de8187abaf5d29029c91b1725ad3",
|
||||
"sha256:4424e42199e86b21fc4db83bd76909a6fc2a2aefb352cb5414833c030f6ed71b",
|
||||
"sha256:4a43c91840bda5f55249413037b7a9b79c90b1184ed504883b72c4df70778579",
|
||||
"sha256:599a1e8ff057ac530c9ad1778293c665cb81a791421f46922d80a86473c13346",
|
||||
"sha256:5c4fae4e9cdd18c82ba3a134be256e98dc0596af1e7285a3d2602c97dcfa5159",
|
||||
"sha256:5ecfa867dea6fabe2a58f03ac9186ea64da1386af2159196da51c4904e11d652",
|
||||
"sha256:62f2578358d3a92e4ab2d830cd1c2049c9c0d0e6d3c58322993cc341bdeac22e",
|
||||
"sha256:6471a82d5abea994e38d2c2abc77164b4f7fbaaf80261cb98394d5793f11b12a",
|
||||
"sha256:6d4f18483d040e18546108eb13b1dfa1000a089bcf8529e30346116ea6240506",
|
||||
"sha256:71a608532ab3bd26223c8d841dde43f3516aa5d2bf37b50ac410bb5e99053e8f",
|
||||
"sha256:74a1d8c85fb6ff0b30fbfa8ad0ac23cd601a138f7509dc617ebc65ef305bb98d",
|
||||
"sha256:7b93a885bb13073afb0aa73ad82059a4c41f4b7d8eb8368980448b52d4c7dc2c",
|
||||
"sha256:7d4751da932caaec419d514eaa4215eaf14b612cff66398dd51129ac22680b20",
|
||||
"sha256:7f627141a26b551bdebbc4855c1157feeef18241b4b8366ed22a5c7d672ef858",
|
||||
"sha256:8169cf44dd8f9071b2b9248c35fc35e8677451c52f795daa2bb4643f32a540bc",
|
||||
"sha256:aa00d66c0fab27373ae44ae26a66a9e43ff2a678bf63a9c7c1a9a4d61172827a",
|
||||
"sha256:ccb032fda0873254380aa2bfad2582aedc2959186cce61e3a17abc1a55ff89c3",
|
||||
"sha256:d754f39e0d1603b5b24a7f8484b22d2904fa551fe865fd0d4c3332f078d20d4e",
|
||||
"sha256:d75c461e20e29afc0aee7172a0950157c704ff0dd51613506bd7d82b718e7410",
|
||||
"sha256:dcd65317dd15bc0451f3e01c80da2216a31916bdcffd6221ca1202d96584aa25",
|
||||
"sha256:e570d3ab32e2c2861c4ebe6ffcad6a8abf9347432a37608fe1fbd157b3f0036b",
|
||||
"sha256:fd43a88e045cf992ed09fa724b5315b790525f2676883a6ea64e3263bae6549d"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.13.2"
|
||||
},
|
||||
"chardet": {
|
||||
"hashes": [
|
||||
"sha256:84ab92ed1c4d4f16916e05906b6b75a6c0fb5db821cc65e70cbd64a3e2a5eaae",
|
||||
"sha256:fc323ffcaeaed0e0a02bf4d117757b98aed530d9ed4531e3e15460124c106691"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.0.4"
|
||||
},
|
||||
"configparser": {
|
||||
"hashes": [
|
||||
"sha256:254c1d9c79f60c45dfde850850883d5aaa7f19a23f13561243a050d5a7c3fe4c",
|
||||
"sha256:c7d282687a5308319bf3d2e7706e575c635b0a470342641c93bea0ea3b5331df"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==4.0.2"
|
||||
},
|
||||
"contextlib2": {
|
||||
"hashes": [
|
||||
"sha256:01f490098c18b19d2bd5bb5dc445b2054d2fa97f09a4280ba2c5f3c394c8162e",
|
||||
"sha256:3355078a159fbb44ee60ea80abd0d87b80b78c248643b49aa6d94673b413609b"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.6.0.post1"
|
||||
},
|
||||
"coverage": {
|
||||
"hashes": [
|
||||
"sha256:0101888bd1592a20ccadae081ba10e8b204d20235d18d05c6f7d5e904a38fc10",
|
||||
"sha256:04b961862334687549eb91cd5178a6fbe977ad365bddc7c60f2227f2f9880cf4",
|
||||
"sha256:1ca43dbd739c0fc30b0a3637a003a0d2c7edc1dd618359d58cc1e211742f8bd1",
|
||||
"sha256:1cbb88b34187bdb841f2599770b7e6ff8e259dc3bb64fc7893acf44998acf5f8",
|
||||
"sha256:232f0b52a5b978288f0bbc282a6c03fe48cd19a04202df44309919c142b3bb9c",
|
||||
"sha256:24bcfa86fd9ce86b73a8368383c39d919c497a06eebb888b6f0c12f13e920b1a",
|
||||
"sha256:25b8f60b5c7da71e64c18888f3067d5b6f1334b9681876b2fb41eea26de881ae",
|
||||
"sha256:2714160a63da18aed9340c70ed514973971ee7e665e6b336917ff4cca81a25b1",
|
||||
"sha256:2ca2cd5264e84b2cafc73f0045437f70c6378c0d7dbcddc9ee3fe192c1e29e5d",
|
||||
"sha256:2cc707fc9aad2592fc686d63ef72dc0031fc98b6fb921d2f5395d9ab84fbc3ef",
|
||||
"sha256:348630edea485f4228233c2f310a598abf8afa5f8c716c02a9698089687b6085",
|
||||
"sha256:40fbfd6b044c9db13aeec1daf5887d322c710d811f944011757526ef6e323fd9",
|
||||
"sha256:46c9c6a1d1190c0b75ec7c0f339088309952b82ae8d67a79ff1319eb4e749b96",
|
||||
"sha256:591506e088901bdc25620c37aec885e82cc896528f28c57e113751e3471fc314",
|
||||
"sha256:5ac71bba1e07eab403b082c4428f868c1c9e26a21041436b4905c4c3d4e49b08",
|
||||
"sha256:5f622f19abda4e934938e24f1d67599249abc201844933a6f01aaa8663094489",
|
||||
"sha256:65bead1ac8c8930cf92a1ccaedcce19a57298547d5d1db5c9d4d068a0675c38b",
|
||||
"sha256:7362a7f829feda10c7265b553455de596b83d1623b3d436b6d3c51c688c57bf6",
|
||||
"sha256:7f2675750c50151f806070ec11258edf4c328340916c53bac0adbc465abd6b1e",
|
||||
"sha256:960d7f42277391e8b1c0b0ae427a214e1b31a1278de6b73f8807b20c2e913bba",
|
||||
"sha256:a50b0888d8a021a3342d36a6086501e30de7d840ab68fca44913e97d14487dc1",
|
||||
"sha256:b7dbc5e8c39ea3ad3db22715f1b5401cd698a621218680c6daf42c2f9d36e205",
|
||||
"sha256:bb3d29df5d07d5399d58a394d0ef50adf303ab4fbf66dfd25b9ef258effcb692",
|
||||
"sha256:c0fff2733f7c2950f58a4fd09b5db257b00c6fec57bf3f68c5bae004d804b407",
|
||||
"sha256:c792d3707a86c01c02607ae74364854220fb3e82735f631cd0a345dea6b4cee5",
|
||||
"sha256:c90bda74e16bcd03861b09b1d37c0a4158feda5d5a036bb2d6e58de6ff65793e",
|
||||
"sha256:cfce79ce41cc1a1dc7fc85bb41eeeb32d34a4cf39a645c717c0550287e30ff06",
|
||||
"sha256:eeafb646f374988c22c8e6da5ab9fb81367ecfe81c70c292623373d2a021b1a1",
|
||||
"sha256:f425f50a6dd807cb9043d15a4fcfba3b5874a54d9587ccbb748899f70dc18c47",
|
||||
"sha256:fcd4459fe35a400b8f416bc57906862693c9f88b66dc925e7f2a933e77f6b18b",
|
||||
"sha256:ff3936dd5feaefb4f91c8c1f50a06c588b5dc69fba4f7d9c79a6617ad80bb7df"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==5.0.1"
|
||||
},
|
||||
"cryptography": {
|
||||
"hashes": [
|
||||
"sha256:02079a6addc7b5140ba0825f542c0869ff4df9a69c360e339ecead5baefa843c",
|
||||
"sha256:1df22371fbf2004c6f64e927668734070a8953362cd8370ddd336774d6743595",
|
||||
"sha256:369d2346db5934345787451504853ad9d342d7f721ae82d098083e1f49a582ad",
|
||||
"sha256:3cda1f0ed8747339bbdf71b9f38ca74c7b592f24f65cdb3ab3765e4b02871651",
|
||||
"sha256:44ff04138935882fef7c686878e1c8fd80a723161ad6a98da31e14b7553170c2",
|
||||
"sha256:4b1030728872c59687badcca1e225a9103440e467c17d6d1730ab3d2d64bfeff",
|
||||
"sha256:58363dbd966afb4f89b3b11dfb8ff200058fbc3b947507675c19ceb46104b48d",
|
||||
"sha256:6ec280fb24d27e3d97aa731e16207d58bd8ae94ef6eab97249a2afe4ba643d42",
|
||||
"sha256:7270a6c29199adc1297776937a05b59720e8a782531f1f122f2eb8467f9aab4d",
|
||||
"sha256:73fd30c57fa2d0a1d7a49c561c40c2f79c7d6c374cc7750e9ac7c99176f6428e",
|
||||
"sha256:7f09806ed4fbea8f51585231ba742b58cbcfbfe823ea197d8c89a5e433c7e912",
|
||||
"sha256:90df0cc93e1f8d2fba8365fb59a858f51a11a394d64dbf3ef844f783844cc793",
|
||||
"sha256:971221ed40f058f5662a604bd1ae6e4521d84e6cad0b7b170564cc34169c8f13",
|
||||
"sha256:a518c153a2b5ed6b8cc03f7ae79d5ffad7315ad4569b2d5333a13c38d64bd8d7",
|
||||
"sha256:b0de590a8b0979649ebeef8bb9f54394d3a41f66c5584fff4220901739b6b2f0",
|
||||
"sha256:b43f53f29816ba1db8525f006fa6f49292e9b029554b3eb56a189a70f2a40879",
|
||||
"sha256:d31402aad60ed889c7e57934a03477b572a03af7794fa8fb1780f21ea8f6551f",
|
||||
"sha256:de96157ec73458a7f14e3d26f17f8128c959084931e8997b9e655a39c8fde9f9",
|
||||
"sha256:df6b4dca2e11865e6cfbfb708e800efb18370f5a46fd601d3755bc7f85b3a8a2",
|
||||
"sha256:ecadccc7ba52193963c0475ac9f6fa28ac01e01349a2ca48509667ef41ffd2cf",
|
||||
"sha256:fb81c17e0ebe3358486cd8cc3ad78adbae58af12fc2bf2bc0bb84e8090fa5ce8"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.8"
|
||||
},
|
||||
"docker": {
|
||||
"hashes": [
|
||||
"sha256:6e06c5e70ba4fad73e35f00c55a895a448398f3ada7faae072e2bb01348bafc1",
|
||||
"sha256:8f93775b8bdae3a2df6bc9a5312cce564cade58d6555f2c2570165a1270cd8a7"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==4.1.0"
|
||||
},
|
||||
"dockerpty": {
|
||||
"hashes": [
|
||||
"sha256:69a9d69d573a0daa31bcd1c0774eeed5c15c295fe719c61aca550ed1393156ce"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.4.1"
|
||||
},
|
||||
"docopt": {
|
||||
"hashes": [
|
||||
"sha256:49b3a825280bd66b3aa83585ef59c4a8c82f2c8a522dbe754a8bc8d08c85c491"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.6.2"
|
||||
},
|
||||
"enum34": {
|
||||
"hashes": [
|
||||
"sha256:2d81cbbe0e73112bdfe6ef8576f2238f2ba27dd0d55752a776c41d38b7da2850",
|
||||
"sha256:644837f692e5f550741432dd3f223bbb9852018674981b1664e5dc339387588a",
|
||||
"sha256:6bd0f6ad48ec2aa117d3d141940d484deccda84d4fcd884f5c3d93c23ecd8c79",
|
||||
"sha256:8ad8c4783bf61ded74527bffb48ed9b54166685e4230386a9ed9b1279e2df5b1"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.1.6"
|
||||
},
|
||||
"execnet": {
|
||||
"hashes": [
|
||||
"sha256:cacb9df31c9680ec5f95553976c4da484d407e85e41c83cb812aa014f0eddc50",
|
||||
"sha256:d4efd397930c46415f62f8a31388d6be4f27a91d7550eb79bc64a756e0056547"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.7.1"
|
||||
},
|
||||
"filelock": {
|
||||
"hashes": [
|
||||
"sha256:18d82244ee114f543149c66a6e0c14e9c4f8a1044b5cdaadd0f82159d6a6ff59",
|
||||
"sha256:929b7d63ec5b7d6b71b0fa5ac14e030b3f70b75747cef1b10da9b879fef15836"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.0.12"
|
||||
},
|
||||
"funcsigs": {
|
||||
"hashes": [
|
||||
"sha256:330cc27ccbf7f1e992e69fef78261dc7c6569012cf397db8d3de0234e6c937ca",
|
||||
"sha256:a7bb0f2cf3a3fd1ab2732cb49eba4252c2af4240442415b4abce3b87022a8f50"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.0.2"
|
||||
},
|
||||
"idna": {
|
||||
"hashes": [
|
||||
"sha256:c357b3f628cf53ae2c4c05627ecc484553142ca23264e593d327bcde5e9c3407",
|
||||
"sha256:ea8b7f6188e6fa117537c3df7da9fc686d485087abf6ac197f9c46432f7e4a3c"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.8"
|
||||
},
|
||||
"importlib-metadata": {
|
||||
"hashes": [
|
||||
"sha256:073a852570f92da5f744a3472af1b61e28e9f78ccf0c9117658dc32b15de7b45",
|
||||
"sha256:d95141fbfa7ef2ec65cfd945e2af7e5a6ddbd7c8d9a25e66ff3be8e3daf9f60f"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.3.0"
|
||||
},
|
||||
"ipaddress": {
|
||||
"hashes": [
|
||||
"sha256:6e0f4a39e66cb5bb9a137b00276a2eff74f93b71dcbdad6f10ff7df9d3557fcc",
|
||||
"sha256:b7f8e0369580bb4a24d5ba1d7cc29660a4a6987763faf1d8a8046830e020e7e2"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.0.23"
|
||||
},
|
||||
"jinja2": {
|
||||
"hashes": [
|
||||
"sha256:74320bb91f31270f9551d46522e33af46a80c3d619f4a4bf42b3164d30b5911f",
|
||||
"sha256:9fe95f19286cfefaa917656583d020be14e7859c6b0252588391e47db34527de"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.10.3"
|
||||
},
|
||||
"jsonschema": {
|
||||
"hashes": [
|
||||
"sha256:4e5b3cf8216f577bee9ce139cbe72eca3ea4f292ec60928ff24758ce626cd163",
|
||||
"sha256:c8a85b28d377cc7737e46e2d9f2b4f44ee3c0e1deac6bf46ddefc7187d30797a"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.2.0"
|
||||
},
|
||||
"markupsafe": {
|
||||
"hashes": [
|
||||
"sha256:00bc623926325b26bb9605ae9eae8a215691f33cae5df11ca5424f06f2d1f473",
|
||||
"sha256:09027a7803a62ca78792ad89403b1b7a73a01c8cb65909cd876f7fcebd79b161",
|
||||
"sha256:09c4b7f37d6c648cb13f9230d847adf22f8171b1ccc4d5682398e77f40309235",
|
||||
"sha256:1027c282dad077d0bae18be6794e6b6b8c91d58ed8a8d89a89d59693b9131db5",
|
||||
"sha256:24982cc2533820871eba85ba648cd53d8623687ff11cbb805be4ff7b4c971aff",
|
||||
"sha256:29872e92839765e546828bb7754a68c418d927cd064fd4708fab9fe9c8bb116b",
|
||||
"sha256:43a55c2930bbc139570ac2452adf3d70cdbb3cfe5912c71cdce1c2c6bbd9c5d1",
|
||||
"sha256:46c99d2de99945ec5cb54f23c8cd5689f6d7177305ebff350a58ce5f8de1669e",
|
||||
"sha256:500d4957e52ddc3351cabf489e79c91c17f6e0899158447047588650b5e69183",
|
||||
"sha256:535f6fc4d397c1563d08b88e485c3496cf5784e927af890fb3c3aac7f933ec66",
|
||||
"sha256:62fe6c95e3ec8a7fad637b7f3d372c15ec1caa01ab47926cfdf7a75b40e0eac1",
|
||||
"sha256:6dd73240d2af64df90aa7c4e7481e23825ea70af4b4922f8ede5b9e35f78a3b1",
|
||||
"sha256:717ba8fe3ae9cc0006d7c451f0bb265ee07739daf76355d06366154ee68d221e",
|
||||
"sha256:79855e1c5b8da654cf486b830bd42c06e8780cea587384cf6545b7d9ac013a0b",
|
||||
"sha256:7c1699dfe0cf8ff607dbdcc1e9b9af1755371f92a68f706051cc8c37d447c905",
|
||||
"sha256:88e5fcfb52ee7b911e8bb6d6aa2fd21fbecc674eadd44118a9cc3863f938e735",
|
||||
"sha256:8defac2f2ccd6805ebf65f5eeb132adcf2ab57aa11fdf4c0dd5169a004710e7d",
|
||||
"sha256:98c7086708b163d425c67c7a91bad6e466bb99d797aa64f965e9d25c12111a5e",
|
||||
"sha256:9add70b36c5666a2ed02b43b335fe19002ee5235efd4b8a89bfcf9005bebac0d",
|
||||
"sha256:9bf40443012702a1d2070043cb6291650a0841ece432556f784f004937f0f32c",
|
||||
"sha256:ade5e387d2ad0d7ebf59146cc00c8044acbd863725f887353a10df825fc8ae21",
|
||||
"sha256:b00c1de48212e4cc9603895652c5c410df699856a2853135b3967591e4beebc2",
|
||||
"sha256:b1282f8c00509d99fef04d8ba936b156d419be841854fe901d8ae224c59f0be5",
|
||||
"sha256:b2051432115498d3562c084a49bba65d97cf251f5a331c64a12ee7e04dacc51b",
|
||||
"sha256:ba59edeaa2fc6114428f1637ffff42da1e311e29382d81b339c1817d37ec93c6",
|
||||
"sha256:c8716a48d94b06bb3b2524c2b77e055fb313aeb4ea620c8dd03a105574ba704f",
|
||||
"sha256:cd5df75523866410809ca100dc9681e301e3c27567cf498077e8551b6d20e42f",
|
||||
"sha256:e249096428b3ae81b08327a63a485ad0878de3fb939049038579ac0ef61e17e7"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.1.1"
|
||||
},
|
||||
"more-itertools": {
|
||||
"hashes": [
|
||||
"sha256:38a936c0a6d98a38bcc2d03fdaaedaba9f412879461dd2ceff8d37564d6522e4",
|
||||
"sha256:c0a5785b1109a6bd7fac76d6837fd1feca158e54e521ccd2ae8bfe393cc9d4fc",
|
||||
"sha256:fe7a7cae1ccb57d33952113ff4fa1bc5f879963600ed74918f1236e212ee50b9"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==5.0.0"
|
||||
},
|
||||
"packaging": {
|
||||
"hashes": [
|
||||
"sha256:aec3fdbb8bc9e4bb65f0634b9f551ced63983a529d6a8931817d52fdd0816ddb",
|
||||
"sha256:fe1d8331dfa7cc0a883b49d75fc76380b2ab2734b220fbb87d774e4fd4b851f8"
|
||||
],
|
||||
"version": "==20.0"
|
||||
},
|
||||
"pathlib2": {
|
||||
"hashes": [
|
||||
"sha256:0ec8205a157c80d7acc301c0b18fbd5d44fe655968f5d947b6ecef5290fc35db",
|
||||
"sha256:6cd9a47b597b37cc57de1c05e56fb1a1c9cc9fab04fe78c29acd090418529868"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.3.5"
|
||||
},
|
||||
"pluggy": {
|
||||
"hashes": [
|
||||
"sha256:15b2acde666561e1298d71b523007ed7364de07029219b604cf808bfa1c765b0",
|
||||
"sha256:966c145cd83c96502c3c3868f50408687b38434af77734af1e9ca461a4081d2d"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.13.1"
|
||||
},
|
||||
"py": {
|
||||
"hashes": [
|
||||
"sha256:5e27081401262157467ad6e7f851b7aa402c5852dbcb3dae06768434de5752aa",
|
||||
"sha256:c20fdd83a5dbc0af9efd622bee9a5564e278f6380fffcacc43ba6f43db2813b0"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.8.1"
|
||||
},
|
||||
"pycparser": {
|
||||
"hashes": [
|
||||
"sha256:a988718abfad80b6b157acce7bf130a30876d27603738ac39f140993246b25b3"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.19"
|
||||
},
|
||||
"pyparsing": {
|
||||
"hashes": [
|
||||
"sha256:4c830582a84fb022400b85429791bc551f1f4871c33f23e44f353119e92f969f",
|
||||
"sha256:c342dccb5250c08d45fd6f8b4a559613ca603b57498511740e65cd11a2e7dcec"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.4.6"
|
||||
},
|
||||
"pyrsistent": {
|
||||
"hashes": [
|
||||
"sha256:f3b280d030afb652f79d67c5586157c5c1355c9a58dfc7940566e28d28f3df1b"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.15.6"
|
||||
},
|
||||
"pytest": {
|
||||
"hashes": [
|
||||
"sha256:6192875be8af57b694b7c4904e909680102befcb99e610ef3d9f786952f795aa",
|
||||
"sha256:f8447ebf8fd3d362868a5d3f43a9df786dfdfe9608843bd9002a2d47a104808f"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==4.6.8"
|
||||
},
|
||||
"pytest-cov": {
|
||||
"hashes": [
|
||||
"sha256:cc6742d8bac45070217169f5f72ceee1e0e55b0221f54bcf24845972d3a47f2b",
|
||||
"sha256:cdbdef4f870408ebdbfeb44e63e07eb18bb4619fae852f6e760645fa36172626"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.8.1"
|
||||
},
|
||||
"pytest-forked": {
|
||||
"hashes": [
|
||||
"sha256:1805699ed9c9e60cb7a8179b8d4fa2b8898098e82d229b0825d8095f0f261100",
|
||||
"sha256:1ae25dba8ee2e56fb47311c9638f9e58552691da87e82d25b0ce0e4bf52b7d87"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.1.3"
|
||||
},
|
||||
"pytest-xdist": {
|
||||
"hashes": [
|
||||
"sha256:0f46020d3d9619e6d17a65b5b989c1ebbb58fc7b1da8fb126d70f4bac4dfeed1",
|
||||
"sha256:7dc0d027d258cd0defc618fb97055fbd1002735ca7a6d17037018cf870e24011"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.31.0"
|
||||
},
|
||||
"pyyaml": {
|
||||
"hashes": [
|
||||
"sha256:0e7f69397d53155e55d10ff68fdfb2cf630a35e6daf65cf0bdeaf04f127c09dc",
|
||||
"sha256:2e9f0b7c5914367b0916c3c104a024bb68f269a486b9d04a2e8ac6f6597b7803",
|
||||
"sha256:35ace9b4147848cafac3db142795ee42deebe9d0dad885ce643928e88daebdcc",
|
||||
"sha256:38a4f0d114101c58c0f3a88aeaa44d63efd588845c5a2df5290b73db8f246d15",
|
||||
"sha256:483eb6a33b671408c8529106df3707270bfacb2447bf8ad856a4b4f57f6e3075",
|
||||
"sha256:4b6be5edb9f6bb73680f5bf4ee08ff25416d1400fbd4535fe0069b2994da07cd",
|
||||
"sha256:7f38e35c00e160db592091751d385cd7b3046d6d51f578b29943225178257b31",
|
||||
"sha256:8100c896ecb361794d8bfdb9c11fce618c7cf83d624d73d5ab38aef3bc82d43f",
|
||||
"sha256:c0ee8eca2c582d29c3c2ec6e2c4f703d1b7f1fb10bc72317355a746057e7346c",
|
||||
"sha256:e4c015484ff0ff197564917b4b4246ca03f411b9bd7f16e02a2f586eb48b6d04",
|
||||
"sha256:ebc4ed52dcc93eeebeae5cf5deb2ae4347b3a81c3fa12b0b8c976544829396a4"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==5.2"
|
||||
},
|
||||
"requests": {
|
||||
"hashes": [
|
||||
"sha256:11e007a8a2aa0323f5a921e9e6a2d7e4e67d9877e85773fba9ba6419025cbeb4",
|
||||
"sha256:9cf5292fcd0f598c671cfc1e0d7d1a7f13bb8085e9a590f48c010551dc6c4b31"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==2.22.0"
|
||||
},
|
||||
"scandir": {
|
||||
"hashes": [
|
||||
"sha256:2586c94e907d99617887daed6c1d102b5ca28f1085f90446554abf1faf73123e",
|
||||
"sha256:2ae41f43797ca0c11591c0c35f2f5875fa99f8797cb1a1fd440497ec0ae4b022",
|
||||
"sha256:2b8e3888b11abb2217a32af0766bc06b65cc4a928d8727828ee68af5a967fa6f",
|
||||
"sha256:2c712840c2e2ee8dfaf36034080108d30060d759c7b73a01a52251cc8989f11f",
|
||||
"sha256:4d4631f6062e658e9007ab3149a9b914f3548cb38bfb021c64f39a025ce578ae",
|
||||
"sha256:67f15b6f83e6507fdc6fca22fedf6ef8b334b399ca27c6b568cbfaa82a364173",
|
||||
"sha256:7d2d7a06a252764061a020407b997dd036f7bd6a175a5ba2b345f0a357f0b3f4",
|
||||
"sha256:8c5922863e44ffc00c5c693190648daa6d15e7c1207ed02d6f46a8dcc2869d32",
|
||||
"sha256:92c85ac42f41ffdc35b6da57ed991575bdbe69db895507af88b9f499b701c188",
|
||||
"sha256:b24086f2375c4a094a6b51e78b4cf7ca16c721dcee2eddd7aa6494b42d6d519d",
|
||||
"sha256:cb925555f43060a1745d0a321cca94bcea927c50114b623d73179189a4e100ac"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.10.0"
|
||||
},
|
||||
"six": {
|
||||
"hashes": [
|
||||
"sha256:1f1b7d42e254082a9db6279deae68afb421ceba6158efa6131de7b3003ee93fd",
|
||||
"sha256:30f610279e8b2578cab6db20741130331735c781b56053c59c4076da27f06b66"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.13.0"
|
||||
},
|
||||
"subprocess32": {
|
||||
"hashes": [
|
||||
"sha256:88e37c1aac5388df41cc8a8456bb49ebffd321a3ad4d70358e3518176de3a56b",
|
||||
"sha256:eb2937c80497978d181efa1b839ec2d9622cf9600a039a79d0e108d1f9aec79d"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.5.4"
|
||||
},
|
||||
"testinfra": {
|
||||
"hashes": [
|
||||
"sha256:780e6c2ab392ea93c26cee1777c968a144c2189a56b3e239a3a66e6d256925b5",
|
||||
"sha256:c3492b39c8d2c98d8419ce1a91d7fe348213f9b98b91198d2e7e88b3954b050b"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.3.0"
|
||||
},
|
||||
"texttable": {
|
||||
"hashes": [
|
||||
"sha256:7dc282a5b22564fe0fdc1c771382d5dd9a54742047c61558e071c8cd595add86",
|
||||
"sha256:eff3703781fbc7750125f50e10f001195174f13825a92a45e9403037d539b4f4"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.6.2"
|
||||
},
|
||||
"toml": {
|
||||
"hashes": [
|
||||
"sha256:229f81c57791a41d65e399fc06bf0848bab550a9dfd5ed66df18ce5f05e73d5c",
|
||||
"sha256:235682dd292d5899d361a811df37e04a8828a5b1da3115886b73cf81ebc9100e"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.10.0"
|
||||
},
|
||||
"tox": {
|
||||
"hashes": [
|
||||
"sha256:06ba73b149bf838d5cd25dc30c2dd2671ae5b2757cf98e5c41a35fe449f131b3",
|
||||
"sha256:806d0a9217584558cc93747a945a9d9bff10b141a5287f0c8429a08828a22192"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==3.14.3"
|
||||
},
|
||||
"urllib3": {
|
||||
"hashes": [
|
||||
"sha256:a8a318824cc77d1fd4b2bec2ded92646630d7fe8619497b142c84a9e6f5a7293",
|
||||
"sha256:f3c5fd51747d450d4dcf6f923c81f78f811aab8205fda64b0aba34a4e48b0745"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==1.25.7"
|
||||
},
|
||||
"virtualenv": {
|
||||
"hashes": [
|
||||
"sha256:0d62c70883c0342d59c11d0ddac0d954d0431321a41ab20851facf2b222598f3",
|
||||
"sha256:55059a7a676e4e19498f1aad09b8313a38fcc0cdbe4fdddc0e9b06946d21b4bb"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==16.7.9"
|
||||
},
|
||||
"wcwidth": {
|
||||
"hashes": [
|
||||
"sha256:3df37372226d6e63e1b1e1eda15c594bca98a22d33a23832a90998faa96bc65e",
|
||||
"sha256:f4ebe71925af7b40a864553f761ed559b43544f8f71746c2d756c7fe788ade7c"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.1.7"
|
||||
},
|
||||
"websocket-client": {
|
||||
"hashes": [
|
||||
"sha256:0fc45c961324d79c781bab301359d5a1b00b13ad1b10415a4780229ef71a5549",
|
||||
"sha256:d735b91d6d1692a6a181f2a8c9e0238e5f6373356f561bb9dc4c7af36f452010"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.57.0"
|
||||
},
|
||||
"zipp": {
|
||||
"hashes": [
|
||||
"sha256:3718b1cbcd963c7d4c5511a8240812904164b7f381b647143a89d3b98f9bcd8e",
|
||||
"sha256:f06903e9f1f43b12d371004b4ac7b06ab39a44adc747266928ae6debfa7b3335"
|
||||
],
|
||||
"index": "pypi",
|
||||
"version": "==0.6.0"
|
||||
}
|
||||
},
|
||||
"develop": {}
|
||||
}
|
||||
46  README.md
@@ -28,8 +28,8 @@ services:
# WEBPASSWORD: 'set a secure password here or it will be random'
# Volumes store your data between container upgrades
volumes:
  - './etc-pihole/:/etc/pihole/'
  - './etc-dnsmasq.d/:/etc/dnsmasq.d/'
  - './etc-pihole/:/etc/pihole/'
  - './etc-dnsmasq.d/:/etc/dnsmasq.d/'
dns:
  - 127.0.0.1
  - 1.1.1.1
@@ -40,7 +40,7 @@ services:
restart: unless-stopped
```

[Here is an equivilent docker run script](https://github.com/pi-hole/docker-pi-hole/blob/master/docker_run.sh).
[Here is an equivalent docker run script](https://github.com/pi-hole/docker-pi-hole/blob/master/docker_run.sh).

## Upgrade Notices:

@@ -75,7 +75,7 @@ A [Docker](https://www.docker.com/what-docker) project to make a lightweight x86

This container uses 2 popular ports, port 53 and port 80, so **may conflict with existing applications ports**. If you have no other services or docker containers using port 53/80 (if you do, keep reading below for a reverse proxy example), the minimum arguments required to run this container are in the script [docker_run.sh](https://github.com/pi-hole/docker-pi-hole/blob/master/docker_run.sh)

If you're using a Red Hat based distrubution with an SELinux Enforcing policy add `:z` to line with volumes like so:
If you're using a Red Hat based distribution with an SELinux Enforcing policy add `:z` to line with volumes like so:

```
-v "$(pwd)/etc-pihole/:/etc/pihole/:z" \
@@ -102,6 +102,13 @@ There are other environment variables if you want to customize various things in
| `WEBPASSWORD: <Admin password>`<br/> **Recommended** *Default: random* | http://pi.hole/admin password. Run `docker logs pihole \| grep random` to find your random pass.
| `DNS1: <IP>`<br/> *Optional* *Default: 8.8.8.8* | Primary upstream DNS provider, default is google DNS
| `DNS2: <IP>`<br/> *Optional* *Default: 8.8.4.4* | Secondary upstream DNS provider, default is google DNS, `no` if only one DNS should used
| `DNSSEC: <True\|False>`<br/> *Optional* *Default: false* | Enable DNSSEC support
| `DNS_BOGUS_PRIV: <True\|False>`<br/> *Optional* *Default: true* | Enable forwarding of reverse lookups for private ranges
| `DNS_FQDN_REQUIRED: <True\|False>`<br/> *Optional* *Default: true* | Never forward non-FQDNs
| `CONDITIONAL_FORWARDING: <True\|False>`<br/> *Optional* *Default: False* | Enable DNS conditional forwarding for device name resolution
| `CONDITIONAL_FORWARDING_IP: <Router's IP>`<br/> *Optional* | If conditional forwarding is enabled, set the IP of the local network router
| `CONDITIONAL_FORWARDING_DOMAIN: <Network Domain>`<br/> *Optional* | If conditional forwarding is enabled, set the domain of the local network router
| `CONDITIONAL_FORWARDING_REVERSE: <Reverse DNS>`<br/> *Optional* | If conditional forwarding is enabled, set the reverse DNS of the local network router (e.g. `0.168.192.in-addr.arpa`)
| `ServerIP: <Host's IP>`<br/> **Recommended** | **--net=host mode requires** Set to your server's LAN IP, used by web block modes and lighttpd bind address
| `ServerIPv6: <Host's IPv6>`<br/> *Required if using IPv6* | **If you have a v6 network** set to your server's LAN IPv6 to block IPv6 ads fully
| `VIRTUAL_HOST: <Custom Hostname>`<br/> *Optional* *Default: $ServerIP* | What your web server 'virtual host' is, accessing admin through this Hostname/IP allows you to make changes to the whitelist / blacklists in addition to the default 'http://pi.hole/admin/' address
@@ -133,14 +140,37 @@ Here is a rundown of other arguments for your docker-compose / docker run.
* [How do I set or reset the Web interface Password?](https://discourse.pi-hole.net/t/how-do-i-set-or-reset-the-web-interface-password/1328)
  * `docker exec -it pihole_container_name pihole -a -p` - then enter your password into the prompt
* Port conflicts? Stop your server's existing DNS / Web services.
  * Ubuntu users especially may need to shut off dns on your docker server so it can run in the container on port 53
    * 17.04 and later should disable dnsmasq.
    * 17.10 should disable systemd-resolved service. See this page: [How to disable systemd-resolved in Ubuntu](https://askubuntu.com/questions/907246/how-to-disable-systemd-resolved-in-ubuntu)
    * Don't forget to stop your services from auto-starting again after you reboot
  * Ubuntu users see below for more detailed information
* Port 80 is highly recommended because if you have another site/service using port 80 by default then the ads may not transform into blank ads correctly. To make sure docker-pi-hole plays nicely with an existing webserver you run you'll probably need a reverse proxy webserver config if you don't have one already. Pi-hole must be the default web app on the proxy e.g. if you go to your host by IP instead of domain then Pi-hole is served out instead of any other sites hosted by the proxy. This is the '[default_server](http://nginx.org/en/docs/http/ngx_http_core_module.html#listen)' in nginx or ['_default_' virtual host](https://httpd.apache.org/docs/2.4/vhosts/examples.html#default) in Apache and is taken advantage of so any undefined ad domain can be directed to your webserver and get a 'blocked' response instead of ads.
  * You can still map other ports to Pi-hole port 80 using docker's port forwarding like this `-p 8080:80`, but again the ads won't render properly. Changing the inner port 80 shouldn't be required unless you run docker host networking mode.
  * [Here is an example of running with jwilder/proxy](https://github.com/pi-hole/docker-pi-hole/blob/master/docker-compose-jwilder-proxy.yml) (an nginx auto-configuring docker reverse proxy for docker) on my port 80 with Pi-hole on another port. Pi-hole needs to be `DEFAULT_HOST` env in jwilder/proxy and you need to set the matching `VIRTUAL_HOST` for the Pi-hole's container. Please read jwilder/proxy readme for more info if you have trouble.

### Installing on Ubuntu
Modern releases of Ubuntu (17.10+) include [`systemd-resolved`](http://manpages.ubuntu.com/manpages/bionic/man8/systemd-resolved.service.8.html) which is configured by default to implement a caching DNS stub resolver. This will prevent pi-hole from listening on port 53.
The stub resolver should be disabled with: `sudo sed -r -i.orig 's/#?DNSStubListener=yes/DNSStubListener=no/g' /etc/systemd/resolved.conf`

This will not change the nameserver settings, which point to the stub resolver thus preventing DNS resolution. Change the `/etc/resolv.conf` symlink to point to `/run/systemd/resolve/resolv.conf`, which is automatically updated to follow the system's [`netplan`](https://netplan.io/):
`sudo sh -c 'rm /etc/resolv.conf && ln -s /run/systemd/resolve/resolv.conf /etc/resolv.conf'`

Once pi-hole is installed, you'll want to configure your clients to use it ([see here](https://discourse.pi-hole.net/t/how-do-i-configure-my-devices-to-use-pi-hole-as-their-dns-server/245)). If you used the symlink above, your docker host will either use whatever is served by DHCP, or whatever static setting you've configured. If you want to explicitly set your docker host's nameservers you can edit the netplan(s) found at `/etc/netplan`, then run `sudo netplan apply`.
Example netplan:
```yaml
network:
  ethernets:
    ens160:
      dhcp4: true
      dhcp4-overrides:
        use-dns: false
      nameservers:
        addresses: [127.0.0.1]
  version: 2
```

Note that it is also possible to disable `systemd-resolved` entirely. However, this can cause problems with name resolution in vpns ([see bug report](https://bugs.launchpad.net/network-manager/+bug/1624317)). It also disables the functionality of netplan since systemd-resolved is used as the default renderer ([see `man netplan`](http://manpages.ubuntu.com/manpages/bionic/man5/netplan.5.html#description)). If you choose to disable the service, you will need to manually set the nameservers, for example by creating a new `/etc/resolv.conf`.

Users of older Ubuntu releases (circa 17.04) will need to disable dnsmasq.

## Docker tags and versioning

The primary docker tags / versions are explained in the following table. [Click here to see the full list of tags](https://store.docker.com/community/images/pihole/pihole/tags) ([arm tags are here](https://store.docker.com/community/images/pihole/pihole/tags)), I also try to tag with the specific version of Pi-hole Core for version archival purposes, the web version that comes with the core releases should be in the [GitHub Release notes](https://github.com/pi-hole/docker-pi-hole/releases).
@@ -148,7 +178,7 @@ The primary docker tags / versions are explained in the following table. [Click
| tag | architecture | description | Dockerfile |
| --- | ------------ | ----------- | ---------- |
| `latest` | auto detect | x86, arm, or arm64 container, docker auto detects your architecture. | [Dockerfile](https://github.com/pi-hole/docker-pi-hole/blob/master/Dockerfile_amd64) |
| `v4.0.0-1` | auto detect | Versioned tags, if you want to pin against a specific version, use one of thesse | |
| `v4.0.0-1` | auto detect | Versioned tags, if you want to pin against a specific version, use one of these | |
| `v4.0.0-1_<arch>` | based on tag | Specific architectures tags | |
| `dev` | auto detect | like latest tag, but for the development branch (pushed occasionally) | |
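The tag table above is the contract the CI scripts later in this diff publish against; as an illustration only (the version shown is just the table's example tag), pulling the multiarch manifest or a pinned per-arch image looks like this:

```bash
# Pull the auto-detecting multiarch tag, or pin to a specific version and architecture.
docker pull pihole/pihole:latest
docker pull pihole/pihole:v4.0.0-1_amd64
```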
@@ -14,7 +14,7 @@ To run the Dockerfile templating, image build, and tests all in one command just

Docker images built by `tox` or `python Dockerfile.py` are named the same but stripped of the `pihole/` docker repository namespace.

e.g. `pi-hole:debian_amd64` or `pi-hole-multiarch:debian_aarch64`
e.g. `pi-hole:debian_amd64` or `pi-hole-multiarch:debian_arm64`

You can run the multiarch images on an amd64 development system if you [enable binfmt-support as described in the multiarch image docs](https://hub.docker.com/r/multiarch/multiarch/debian-debootstrap/)
@@ -282,11 +282,11 @@ setup_web_port() {
    echo "Custom WEB_PORT set to $web_port"
    echo "INFO: Without proper router DNAT forwarding to $ServerIP:$web_port, you may not get any blocked websites on ads"

    # Update lighttpd's port
    sed -i '/server.port\s*=\s*80\s*$/ s/80/'$WEB_PORT'/g' /etc/lighttpd/lighttpd.conf
    # Update any default port 80 references in the HTML
    grep -Prl '://127\.0\.0\.1/' /var/www/html/ | xargs -r sed -i "s|/127\.0\.0\.1/|/127.0.0.1:${WEB_PORT}/|g"
    grep -Prl '://pi\.hole/' /var/www/html/ | xargs -r sed -i "s|/pi\.hole/|/pi\.hole:${WEB_PORT}/|g"
    # Update lighttpd's port
    sed -i '/server.port\s*=\s*80\s*$/ s/80/'$WEB_PORT'/g' /etc/lighttpd/lighttpd.conf

}
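A quick way to confirm those rewrites landed (mirroring the test suite further down; `WEB_PORT=999` and a container named `pihole` are assumptions here) is:

```bash
# At least one file should now reference the custom port...
docker exec pihole grep -rl "://127.0.0.1:999/" /var/www/html/
# ...and a search for the old portless address should come back empty (exit code 1).
docker exec pihole grep -rq "://127.0.0.1/" /var/www/html/; echo "exit: $?"
```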
51 circle-deploy.sh Executable file
@@ -0,0 +1,51 @@
#!/usr/bin/env bash
set -ex
# Circle CI Job for merging/deploying all architectures (post-test passing)
. circle-vars.sh

annotate() {
    local base=$1
    local image=$2
    local arch=$3
    local annotate_flags="${annotate_map[$arch]}"

    $dry docker manifest annotate ${base} ${image} --os linux ${annotate_flags}
}

# Keep in sync with circle-ci job names
declare -A annotate_map=(
    ["amd64"]="--arch amd64"
    ["armel"]="--arch arm --variant v6"
    ["armhf"]="--arch arm --variant v7"
    ["arm64"]="--arch arm64 --variant v8"
)

# push image when not running a PR
mkdir -p ~/.docker
export DOCKER_CLI_EXPERIMENTAL='enabled'
echo "{}" | jq '.experimental="enabled"' | tee ~/.docker/config.json
docker info
if [[ "$CIRCLE_PR_NUMBER" == "" ]]; then
    images=()
    echo $DOCKERHUB_PASS | docker login --username=$DOCKERHUB_USER --password-stdin
    ls -lat ./ci-workspace/
    cd ci-workspace

    for arch in *; do
        arch_image=$(cat $arch)
        docker pull $arch_image
        images+=($arch_image)
    done

    for docker_tag in $MULTIARCH_IMAGE $LATEST_IMAGE; do
        docker manifest create $docker_tag ${images[*]}
        for arch in *; do
            arch_image=$(cat $arch)
            docker pull $arch_image
            annotate "$docker_tag" "$arch_image" "$arch"
        done

        docker manifest inspect "$docker_tag"
        docker manifest push "$docker_tag"
    done;
fi
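Expanded for one architecture, the annotate loop above boils down to something like the following (image names are illustrative, not real CI output):

```bash
# e.g. arch=armhf, docker_tag=pihole/pihole:v4.2.1, arch_image=pihole/pihole:v4.2.1-armhf
docker manifest annotate pihole/pihole:v4.2.1 pihole/pihole:v4.2.1-armhf \
    --os linux --arch arm --variant v7
docker manifest push pihole/pihole:v4.2.1
```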
30 circle-test.sh Executable file
@@ -0,0 +1,30 @@
#!/usr/bin/env bash
set -ex

# Circle CI Job for single architecture

# setup qemu/variables
docker run --rm --privileged multiarch/qemu-user-static:register --reset > /dev/null
. circle-vars.sh

if [[ "$1" == "enter" ]]; then
    enter="-it --entrypoint=sh"
fi

# generate and build dockerfile
docker build -t image_pipenv -f Dockerfile_build .
env > /tmp/env
docker run --rm \
    -v /var/run/docker.sock:/var/run/docker.sock \
    -v "$(pwd):/$(pwd)" \
    -w "$(pwd)" \
    -e PIPENV_CACHE_DIR="$(pwd)/.pipenv" \
    --env-file /tmp/env \
    $enter image_pipenv
# docker run --rm -v /var/run/docker.sock:/var/run/docker.sock -v "$(pwd):/$(pwd)" -w "$(pwd)" --env-file /tmp/env image_pipenv /ws/Dockerfile.sh

docker images
echo $DOCKERHUB_PASS | docker login --username=$DOCKERHUB_USER --password-stdin
docker push $ARCH_IMAGE
mkdir -p ci-workspace
echo "$ARCH_IMAGE" | tee ./ci-workspace/$ARCH
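For local debugging, the script's `enter` argument swaps the build container's entrypoint for an interactive shell, roughly like this (Docker Hub credentials are still needed for the push steps at the end):

```bash
# Build for the default architecture and drop into a shell inside the build container
ARCH=amd64 ./circle-test.sh enter
```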
49 circle-vars.sh Executable file
@@ -0,0 +1,49 @@
set -a

CIRCLE_JOB="${CIRCLE_JOB:-}"
ARCH="${ARCH:-$CIRCLE_JOB}"
if [[ -z "$ARCH" ]] ; then
    echo "Defaulting arch to amd64"
    ARCH="amd64"
fi
BASE_IMAGE="${BASE_IMAGE:-${CIRCLE_PROJECT_REPONAME}}"
if [[ -z "$BASE_IMAGE" ]] ; then
    echo "Defaulting image name to pihole"
    BASE_IMAGE="pihole"
fi

# The docker image will match the github repo path by default but is overrideable with CircleCI environment
# BASE_IMAGE Overridable by Circle environment, including namespace (e.g. BASE_IMAGE=bobsmith/test-img:latest)
CIRCLE_PROJECT_USERNAME="${CIRCLE_PROJECT_USERNAME:-unset}"
HUB_NAMESPACE="${HUB_NAMESPACE:-$CIRCLE_PROJECT_USERNAME}"
[[ $CIRCLE_PROJECT_USERNAME == "pi-hole" ]] && HUB_NAMESPACE="pihole" # Custom mapping for namespace
[[ $BASE_IMAGE != *"/"* ]] && BASE_IMAGE="${HUB_NAMESPACE}/${BASE_IMAGE}" # If missing namespace, add one

# Secondary docker tag info (origin github branch/tag) will get prepended also
ARCH_IMAGE="$BASE_IMAGE"
[[ $ARCH_IMAGE != *":"* ]] && ARCH_IMAGE="${BASE_IMAGE}:$ARCH" # If tag missing, add circle job name as a tag (architecture here)

DOCKER_TAG="${CIRCLE_TAG:-$CIRCLE_BRANCH}"
if [[ -n "$DOCKER_TAG" ]]; then
    # remove latest tag if used (as part of a user provided image variable)
    ARCH_IMAGE="${ARCH_IMAGE/:latest/:}"
    # Prepend the github tag(version) or branch. image:arch = image:v1.0-arch
    ARCH_IMAGE="${ARCH_IMAGE/:/:${DOCKER_TAG}-}"
    # latest- sometimes has a trailing slash, remove it
    ARCH_IMAGE="${ARCH_IMAGE/%-/}"
fi

# To get latest released, cut a release on https://github.com/pi-hole/docker-pi-hole/releases (manually gated for quality control)
latest_tag=''
if ! latest_tag=$(curl -sI https://github.com/pi-hole/docker-pi-hole/releases/latest | grep --color=never -i Location | awk -F / '{print $NF}' | tr -d '[:cntrl:]'); then
    print "Failed to retrieve latest docker-pi-hole release metadata"
else
    if [[ "$DOCKER_TAG" == "$latest_tag" ]] ; then
        #LATEST_IMAGE="$BASE_IMAGE:latest"
        LATEST_IMAGE="$BASE_IMAGE:testing_latest_deleteme"
    fi
fi

MULTIARCH_IMAGE="$BASE_IMAGE:$DOCKER_TAG"

set +a
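To see what the derived names end up as, the file can be sourced locally with made-up values (everything below is illustrative, not CI output):

```bash
# Pretend to be the armhf CI job building a tagged release
export CIRCLE_JOB=armhf BASE_IMAGE=pihole/pihole CIRCLE_TAG=v4.2.1
. circle-vars.sh
echo "$ARCH_IMAGE"       # pihole/pihole:v4.2.1-armhf  (per-architecture tag)
echo "$MULTIARCH_IMAGE"  # pihole/pihole:v4.2.1        (manifest list tag)
```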
@@ -1,97 +0,0 @@
#!/bin/bash -e
# Script for manually pushing the docker arm images for pi-hole org members only
# (no one else has docker repo permissions)
if [ ! -f ~/.docker/config.json ] ; then
    echo "Error: You should setup your docker push authorization first"
    exit 1
fi

parse_git_branch() {
    var="$(git branch 2> /dev/null | sed -e '/^[^*]/d' -e 's/* \(.*\)/\1/')"
    # convert release/ to release-
    echo "${var/release\//release-}"
}

annotate() {
    local base=$1
    local image=$2
    local arch=${image##*_}
    local docker_arch=${arch_map[$arch]}

    if [ -z $docker_arch ]; then
        echo "Unknown arch in docker tag: ${arch}"
        exit 1
    else
        $dry docker manifest annotate ${base} ${image} --os linux --arch ${docker_arch}
    fi
}

namespace='pihole'
localimg='pihole'
remoteimg="$namespace/$localimg"
branch="$(parse_git_branch)"
local_version="$(cat VERSION)"
version="${version:-unset}"
dry="${dry}"
latest="${latest:-false}" # true as shell env var to deploy latest

# arch aliases
# ARMv6/armel doesn't have a FTL binary for v4.0 pi-hole
declare -A arch_map=( ["amd64"]="amd64" ["armhf"]="arm" ["aarch64"]="arm64")

# Set anything to dry prior to running this in order to print what would run instead of actually run it.
if [[ -n "$dry" ]]; then dry='echo '; fi

if [[ "$version" == 'unset' ]]; then
    if [[ "$branch" == "master" ]]; then
        echo "Version number var is unset and master branch needs a version...pass in \$version variable!"
        exit 1
    elif [[ "$branch" == "release-"* ]]; then
        version="$(echo $branch | grep -Po 'v[\d\w\.-]*')"
        echo "Version number is being taken from this release branch $version"
    else
        version="$branch"
        # Use a different image for segregating dev tags maybe? Not right now, just a thought I had
        #remoteimg="${namespace}/${localimg}-dev"
        echo "Using the branch ($branch) for deployed image version since not passed in"
    fi
fi

echo "# DEPLOYING:"
echo "version: $version"
echo "branch: $branch"
[[ -n "$dry" ]] && echo "DRY RUN: $dry"
echo "Example tagging: docker tag ${localimg}:armhf ${remoteimg}:${version}_amd64"

if [[ -z "$dry" ]] ; then
    echo "Deleting all manifest data to work around cached old copies preventing updates"
    rm -rf ~/.docker/manifests/*
fi

$dry tox

images=()
for tag in ${!arch_map[@]}; do
    # Verison specific tags for ongoing history
    $dry docker tag $localimg:${local_version}_$tag $remoteimg:${version}_${tag}
    $dry docker push pihole/pihole:${version}_${tag}
    images+=(pihole/pihole:${version}_${tag})
done

$dry docker manifest create --amend pihole/pihole:${version} ${images[*]}

for image in "${images[@]}"; do
    annotate pihole/pihole:${version} ${image}
done

$dry docker manifest push pihole/pihole:${version}

# Floating latest tag alias
if [[ "$latest" == 'true' && "$branch" == "master" ]] ; then
    latestimg="$remoteimg:latest"
    $dry docker manifest create --amend "$latestimg" ${images[*]}
    for image in "${images[@]}"; do
        annotate "$latestimg" "${image}"
    done
    $dry docker manifest push "$latestimg"
fi
@@ -2,61 +2,62 @@ version: "3"

# https://github.com/pi-hole/docker-pi-hole/blob/master/README.md

applist:
  image: jwilder/nginx-proxy
  ports:
    - '80:80'
  environment:
    DEFAULT_HOST: pihole.yourDomain.lan
  volumes:
    - '/var/run/docker.sock:/tmp/docker.sock'
  restart: always
services:
  jwilder-proxy:
    image: jwilder/nginx-proxy
    ports:
      - '80:80'
    environment:
      DEFAULT_HOST: pihole.yourDomain.lan
    volumes:
      - '/var/run/docker.sock:/tmp/docker.sock'
    restart: always

pihole:
  image: pihole/pihole:latest
  dns:
    - 127.0.0.1
    - 1.1.1.1
  ports:
    - '53:53/tcp'
    - '53:53/udp'
    - "67:67/udp"
    - '8053:80/tcp'
    - "443:443/tcp"
  volumes:
    - './etc-pihole/:/etc/pihole/'
    - './etc-dnsmasq.d/:/etc/dnsmasq.d/'
    # run `touch ./var-log/pihole.log` first unless you like errors
    # - './var-log/pihole.log:/var/log/pihole.log'
  # Recommended but not required (DHCP needs NET_ADMIN)
  # https://github.com/pi-hole/docker-pi-hole#note-on-capabilities
  cap_add:
    - NET_ADMIN
  environment:
    ServerIP: 192.168.41.55
    PROXY_LOCATION: pihole
    VIRTUAL_HOST: pihole.yourDomain.lan
    VIRTUAL_PORT: 80
  extra_hosts:
    # Resolve to nothing domains (terminate connection)
    - 'nw2master.bioware.com nwn2.master.gamespy.com:0.0.0.0'
    # LAN hostnames for other docker containers using jwilder
    - 'yourDomain.lan:192.168.41.55'
    - 'pihole pihole.yourDomain.lan:192.168.41.55'
    - 'ghost ghost.yourDomain.lan:192.168.41.55'
    - 'wordpress wordpress.yourDomain.lan:192.168.41.55'
  restart: always
  pihole:
    image: pihole/pihole:latest
    dns:
      - 127.0.0.1
      - 1.1.1.1
    ports:
      - '53:53/tcp'
      - '53:53/udp'
      - "67:67/udp"
      - '8053:80/tcp'
      - "443:443/tcp"
    volumes:
      - './etc-pihole/:/etc/pihole/'
      - './etc-dnsmasq.d/:/etc/dnsmasq.d/'
      # run `touch ./var-log/pihole.log` first unless you like errors
      # - './var-log/pihole.log:/var/log/pihole.log'
    # Recommended but not required (DHCP needs NET_ADMIN)
    # https://github.com/pi-hole/docker-pi-hole#note-on-capabilities
    cap_add:
      - NET_ADMIN
    environment:
      ServerIP: 192.168.41.55
      PROXY_LOCATION: pihole
      VIRTUAL_HOST: pihole.yourDomain.lan
      VIRTUAL_PORT: 80
    extra_hosts:
      # Resolve to nothing domains (terminate connection)
      - 'nw2master.bioware.com nwn2.master.gamespy.com:0.0.0.0'
      # LAN hostnames for other docker containers using jwilder
      - 'yourDomain.lan:192.168.41.55'
      - 'pihole pihole.yourDomain.lan:192.168.41.55'
      - 'ghost ghost.yourDomain.lan:192.168.41.55'
      - 'wordpress wordpress.yourDomain.lan:192.168.41.55'
    restart: always

# Another container you might want to have running through the proxy
# Note it also have ENV Vars like pihole and a host under pihole's extra_hosts
#ghost:
#  image: fractalf/ghost
#  ports:
#    - '2368:2368/tcp'
#  volumes:
#    - '/etc/ghost/:/ghost-override'
#  environment:
#    PROXY_LOCATION: ghost
#    VIRTUAL_HOST: ghost.yourDomain.lan
#    VIRTUAL_PORT: 2368
#  restart: always
  # Another container you might want to have running through the proxy
  # Note it also have ENV Vars like pihole and a host under pihole's extra_hosts
  # ghost:
  #   image: fractalf/ghost
  #   ports:
  #     - '2368:2368/tcp'
  #   volumes:
  #     - '/etc/ghost/:/ghost-override'
  #   environment:
  #     PROXY_LOCATION: ghost
  #     VIRTUAL_HOST: ghost.yourDomain.lan
  #     VIRTUAL_PORT: 2368
  #   restart: always
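Assuming the example is saved as `docker-compose.yml` in the current directory, bringing the stack up and watching Pi-hole start is the usual compose workflow:

```bash
docker-compose up -d            # start the jwilder proxy and pihole services
docker-compose logs -f pihole   # follow Pi-hole's startup output
```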
@@ -15,11 +15,11 @@ Please note the following about this [traefik](https://traefik.io/) example for
version: '3'

services:
  #
  #
  traefik:
    container_name: traefik
    domainname: homedomain.lan

    image: traefik
    restart: unless-stopped
    # Note I opt to whitelist certain apps for exposure to traefik instead of auto discovery
@@ -42,7 +42,7 @@ services:
  pihole:
    container_name: pihole
    domainname: homedomain.lan

    image: pihole/pihole:latest
    dns:
      - 127.0.0.1
@@ -106,4 +106,3 @@ traefik | time="2018-03-07T18:57:42Z" level=info msg="Server configuration re
```

Your port 8080 (the Traefik dashboard) should also list the Route/Rule for pihole and the backend-pihole container.
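One low-tech way to check that from the command line (the API path shown is the Traefik 1.x one and may differ on newer versions, so treat this as a sketch):

```bash
# Grep the dashboard/API on port 8080 for the pihole backend
curl -s http://localhost:8080/api/providers | grep -o "backend-pihole" | head -n1
```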
@@ -18,10 +18,10 @@ services:
      # WEBPASSWORD: 'set a secure password here or it will be random'
    # Volumes store your data between container upgrades
    volumes:
      - './etc-pihole/:/etc/pihole/'
      - './etc-dnsmasq.d/:/etc/dnsmasq.d/'
      # run `touch ./var-log/pihole.log` first unless you like errors
      # - './var-log/pihole.log:/var/log/pihole.log'
      - './etc-pihole/:/etc/pihole/'
      - './etc-dnsmasq.d/:/etc/dnsmasq.d/'
      # run `touch ./var-log/pihole.log` first unless you like errors
      # - './var-log/pihole.log:/var/log/pihole.log'
    dns:
      - 127.0.0.1
      - 1.1.1.1
@@ -26,7 +26,7 @@ for i in $(seq 1 20); do
    fi

    if [ $i -eq 20 ] ; then
        echo -e "\nTimed out waiting for Pi-hole start start, consult check your container logs for more info (\`docker logs pihole\`)"
        echo -e "\nTimed out waiting for Pi-hole start, consult check your container logs for more info (\`docker logs pihole\`)"
        exit 1
    fi
done;
43 install.sh
@@ -4,7 +4,7 @@ mkdir -p /etc/pihole/
mkdir -p /var/run/pihole
# Production tags with valid web footers
export CORE_VERSION="$(cat /etc/docker-pi-hole-version)"
export WEB_VERSION="${CORE_VERSION}"
export WEB_VERSION="v4.3.3"

# Only use for pre-production / testing
export CHECKOUT_BRANCHES=false
@@ -14,7 +14,7 @@ if [[ "$CORE_VERSION" == *"release/"* ]] ; then
fi

apt-get update
apt-get install -y curl procps
apt-get install --no-install-recommends -y curl procps ca-certificates
curl -L -s $S6OVERLAY_RELEASE | tar xvzf - -C /
mv /init /s6-init

@@ -23,7 +23,7 @@ which debconf-apt-progress
mv "$(which debconf-apt-progress)" /bin/no_debconf-apt-progress

# Get the install functions
curl https://raw.githubusercontent.com/pi-hole/pi-hole/${CORE_VERSION}/automated%20install/basic-install.sh > "$PIHOLE_INSTALL"
curl https://raw.githubusercontent.com/pi-hole/pi-hole/${CORE_VERSION}/automated%20install/basic-install.sh > "$PIHOLE_INSTALL"
PH_TEST=true . "${PIHOLE_INSTALL}"

# Preseed variables to assist with using --unattended install
@@ -47,19 +47,21 @@ distro_check
apt-get -y install debconf-utils
echo resolvconf resolvconf/linkify-resolvconf boolean false | debconf-set-selections

# Tried this - unattended causes starting services during a build, should probably PR a flag to shut that off and switch to that
#bash -ex "./${PIHOLE_INSTALL}" --unattended
install_dependent_packages INSTALLER_DEPS[@]
install_dependent_packages PIHOLE_DEPS[@]
install_dependent_packages PIHOLE_WEB_DEPS[@]
ln -s /bin/true /usr/local/bin/service
bash -ex "./${PIHOLE_INSTALL}" --unattended
rm /usr/local/bin/service
# Old way of setting up
#install_dependent_packages INSTALLER_DEPS[@]
#install_dependent_packages PIHOLE_DEPS[@]
#install_dependent_packages PIHOLE_WEB_DEPS[@]
# IPv6 support for nc openbsd better than traditional
apt-get install -y --force-yes netcat-openbsd

piholeGitUrl="${piholeGitUrl}"
webInterfaceGitUrl="${webInterfaceGitUrl}"
webInterfaceDir="${webInterfaceDir}"
git clone "${piholeGitUrl}" "${PI_HOLE_LOCAL_REPO}"
git clone "${webInterfaceGitUrl}" "${webInterfaceDir}"
#git clone --branch "${CORE_VERSION}" --depth 1 "${piholeGitUrl}" "${PI_HOLE_LOCAL_REPO}"
#git clone --branch "${WEB_VERSION}" --depth 1 "${webInterfaceGitUrl}" "${webInterfaceDir}"

tmpLog="/tmp/pihole-install.log"
installLogLoc="${installLogLoc}"
@@ -67,6 +69,18 @@ FTLdetect 2>&1 | tee "${tmpLog}"
installPihole 2>&1 | tee "${tmpLog}"
mv "${tmpLog}" /

fetch_release_metadata() {
    local directory="$1"
    local version="$2"
    pushd "$directory"
    git fetch -t
    git remote set-branches origin '*'
    git fetch --depth 10
    git checkout master
    git reset --hard "$version"
    popd
}

if [[ $CHECKOUT_BRANCHES == true ]] ; then
    ln -s /bin/true /usr/local/bin/service
    ln -s /bin/true /usr/local/bin/update-rc.d
@@ -78,8 +92,13 @@ if [[ $CHECKOUT_BRANCHES == true ]] ; then
    unlink /usr/local/bin/update-rc.d
else
    # Reset to our tags so version numbers get detected correctly
    pushd "${PI_HOLE_LOCAL_REPO}"; git reset --hard "${CORE_VERSION}"; popd;
    pushd "${webInterfaceDir}"; git reset --hard "${WEB_VERSION}"; popd;
    fetch_release_metadata "${PI_HOLE_LOCAL_REPO}" "${CORE_VERSION}"
    fetch_release_metadata "${webInterfaceDir}" "${WEB_VERSION}"
fi
# FTL Armel fix not in prod yet
# Remove once https://github.com/pi-hole/pi-hole/commit/3fbb0ac8dde14b8edc1982ae3a2a021f3cf68477 is in master
if [[ "$ARCH" == 'armel' ]]; then
    curl -o /usr/bin/pihole-FTL https://ftl.pi-hole.net/development/pihole-FTL-armel-native
fi

sed -i 's/readonly //g' /opt/pihole/webpage.sh
@@ -1,7 +1,53 @@
docker-compose
jinja2
pytest>=3.6.0
pytest-cov
pytest-xdist
testinfra==1.5.1
tox
apipkg==1.5
atomicwrites==1.3.0
attrs==19.3.0
backports.shutil-get-terminal-size==1.0.0
backports.ssl-match-hostname==3.7.0.1
bcrypt==3.1.7
cached-property==1.5.1
certifi==2019.11.28
cffi==1.13.2
chardet==3.0.4
configparser==4.0.2
contextlib2==0.6.0.post1
coverage==5.0.1
cryptography==2.8
docker==4.1.0
dockerpty==0.4.1
docopt==0.6.2
enum34==1.1.6
execnet==1.7.1
filelock==3.0.12
funcsigs==1.0.2
idna==2.8
importlib-metadata==1.3.0
ipaddress==1.0.23
Jinja2==2.10.3
jsonschema==3.2.0
MarkupSafe==1.1.1
more-itertools==5.0.0
packaging==19.2
pathlib2==2.3.5
pluggy==0.13.1
py==1.8.1
pycparser==2.19
pyparsing==2.4.6
pyrsistent==0.15.6
pytest==4.6.8
pytest-cov==2.8.1
pytest-forked==1.1.3
pytest-xdist==1.31.0
PyYAML==5.2
requests==2.22.0
scandir==1.10.0
six==1.13.0
subprocess32==3.5.4
testinfra==3.3.0
texttable==1.6.2
toml==0.10.0
tox==3.14.3
urllib3==1.25.7
virtualenv==16.7.9
wcwidth==0.1.7
websocket-client==0.57.0
zipp==0.6.0
14 start.sh
@@ -10,10 +10,17 @@ export HOSTNAME
export WEBLOGDIR
export DNS1
export DNS2
export DNSSEC
export DNS_BOGUS_PRIV
export DNS_FQDN_REQUIRED
export INTERFACE
export DNSMASQ_LISTENING_BEHAVIOUR="$DNSMASQ_LISTENING"
export IPv6
export WEB_PORT
export CONDITIONAL_FORWARDING
export CONDITIONAL_FORWARDING_IP
export CONDITIONAL_FORWARDING_DOMAIN
export CONDITIONAL_FORWARDING_REVERSE

export adlistFile='/etc/pihole/adlists.list'

@@ -40,6 +47,13 @@ validate_env || exit 1
prepare_configs
change_setting "IPV4_ADDRESS" "$ServerIP"
change_setting "IPV6_ADDRESS" "$ServerIPv6"
change_setting "DNS_BOGUS_PRIV" "$DNS_BOGUS_PRIV"
change_setting "DNS_FQDN_REQUIRED" "$DNS_FQDN_REQUIRED"
change_setting "DNSSEC" "$DNSSEC"
change_setting "CONDITIONAL_FORWARDING" "$CONDITIONAL_FORWARDING"
change_setting "CONDITIONAL_FORWARDING_IP" "$CONDITIONAL_FORWARDING_IP"
change_setting "CONDITIONAL_FORWARDING_DOMAIN" "$CONDITIONAL_FORWARDING_DOMAIN"
change_setting "CONDITIONAL_FORWARDING_REVERSE" "$CONDITIONAL_FORWARDING_REVERSE"
setup_web_port "$WEB_PORT"
setup_web_password "$WEBPASSWORD"
setup_dnsmasq "$DNS1" "$DNS2" "$INTERFACE" "$DNSMASQ_LISTENING_BEHAVIOUR"
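`change_setting` comes from the Pi-hole core scripts and persists each value as a `KEY=value` line in `/etc/pihole/setupVars.conf`, so the block above roughly amounts to the following check (values shown are invented examples, and the container name `pihole` is an assumption):

```bash
# Inspect the persisted settings inside a running container
docker exec pihole grep -E "DNSSEC|CONDITIONAL_FORWARDING" /etc/pihole/setupVars.conf
# Expected shape of the output, depending on your environment variables:
# DNSSEC=true
# CONDITIONAL_FORWARDING=true
# CONDITIONAL_FORWARDING_IP=192.168.1.1
```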
@@ -1,10 +1,12 @@

import functools
import os
import pytest
import testinfra
import os
import types

check_output = testinfra.get_backend(
    "local://"
).get_module("Command").check_output
local_host = testinfra.get_host('local://')
check_output = local_host.check_output

__version__ = None
dotdot = os.path.abspath(os.path.join(os.path.abspath(__file__), os.pardir, os.pardir))
@@ -22,7 +24,7 @@ def args_volumes():

@pytest.fixture()
def args_env():
    return '-e ServerIP="127.0.0.1" -e ServerIPv6="::1"'
    return '-e ServerIP="127.0.0.1"'

@pytest.fixture()
def args(args_dns, args_volumes, args_env):
@@ -34,14 +36,14 @@ def test_args():
    return ''

def DockerGeneric(request, _test_args, _args, _image, _cmd, _entrypoint):
    assert 'docker' in check_output('id'), "Are you in the docker group?"
    #assert 'docker' in check_output('id'), "Are you in the docker group?"
    # Always appended PYTEST arg to tell pihole we're testing
    if 'pihole' in _image and 'PYTEST=1' not in _args:
        _args = '{} -e PYTEST=1'.format(_args)
    docker_run = 'docker run -d -t {args} {test_args} {entry} {image} {cmd}'\
        .format(args=_args, test_args=_test_args, entry=_entrypoint, image=_image, cmd=_cmd)
    # Print a human runable version of the container run command for faster debugging
    print docker_run.replace('-d -t', '--rm -it').replace('tail -f /dev/null', 'bash')
    print(docker_run.replace('-d -t', '--rm -it').replace('tail -f /dev/null', 'bash'))
    docker_id = check_output(docker_run)

    def teardown():
@@ -49,24 +51,9 @@ def DockerGeneric(request, _test_args, _args, _image, _cmd, _entrypoint):
        check_output("docker rm -f {}".format(docker_id))
    request.addfinalizer(teardown)

    docker_container = testinfra.get_backend("docker://" + docker_id)
    docker_container = testinfra.backend.get_backend("docker://" + docker_id, sudo=False)
    docker_container.id = docker_id

    def run_bash(self, command, *args, **kwargs):
        cmd = self.get_command(command, *args)
        if self.user is not None:
            out = self.run_local(
                "docker exec -u %s %s /bin/bash -c %s",
                self.user, self.name, cmd)
        else:
            out = self.run_local(
                "docker exec %s /bin/bash -c %s", self.name, cmd)
        out.command = self.encode(cmd)
        return out

    funcType = type(docker_container.run)
    # override run function to use bash not sh
    docker_container.run = funcType(run_bash, docker_container, testinfra.backend.docker.DockerBackend)
    return docker_container


@@ -88,7 +75,7 @@ def DockerPersist(request, persist_test_args, persist_args, persist_image, persi
def entrypoint():
    return ''

@pytest.fixture(params=['amd64', 'armhf', 'aarch64'])
@pytest.fixture(params=['amd64', 'armhf', 'arm64', 'armel'])
def arch(request):
    return request.param

@@ -133,7 +120,7 @@ def persist_args_volumes():

@pytest.fixture(scope='module')
def persist_args_env():
    return '-e ServerIP="127.0.0.1" -e ServerIPv6="::1"'
    return '-e ServerIP="127.0.0.1"'

@pytest.fixture(scope='module')
def persist_args(persist_args_dns, persist_args_volumes, persist_args_env):
@@ -177,7 +164,7 @@ def Slow():
    while True:
        try:
            assert check()
        except AssertionError, e:
        except AssertionError as e:
            if time.time() < timeout_at:
                time.sleep(1)
            else:
@@ -1,3 +1,4 @@

import os
import pytest
import re
@@ -21,7 +22,7 @@ def test_IPv6_not_True_removes_ipv6(Docker, Slow, test_args, expected_ipv6, expe
    # On overlay2(?) docker sometimes writes to disk are slow enough to break some tests...
    expected_ipv6_check = lambda: (\
        IPV6_LINE in Docker.run('grep \'use-ipv6.pl\' {}'.format(WEB_CONFIG)).stdout
    ) == expected_ipv6
    ) == expected_ipv6
    Slow(expected_ipv6_check)


@@ -35,13 +36,14 @@ def test_overrides_default_WEB_PORT(Docker, Slow, test_args):
    assert "Custom WEB_PORT set to 999" in function.stdout
    assert "INFO: Without proper router DNAT forwarding to 127.0.0.1:999, you may not get any blocked websites on ads" in function.stdout
    Slow(lambda: re.search(CONFIG_LINE, Docker.run('cat {}'.format(WEB_CONFIG)).stdout) != None)
    Slow(lambda: re.search('://127.0.0.1:999/', Docker.run('cat /var/www/html/pihole/index.php').stdout) != None)
    # grep fails to find any of the old address w/o port
    assert Docker.run('grep -rq "://127.0.0.1/" /var/www/html/').rc == 1
    assert Docker.run('grep -rq "://pi.hole/" /var/www/html/').rc == 1
    # Find at least one instance of our changes
    # upstream repos determines how many and I don't want to keep updating this test
    assert int(Docker.run('grep -rl "://127.0.0.1:999/" /var/www/html/ | wc -l').stdout) >= 1
    assert int(Docker.run('grep -rl "://pi.hole:999/" /var/www/html/ | wc -l').stdout) >= 1
    #assert Docker.run('grep -r "://127.0.0.1/" /var/www/html/').stdout == ''
    #assert Docker.run('grep -r "://pi.hole/" /var/www/html/').stdout == ''
    ## Find at least one instance of our changes
    ## upstream repos determines how many and I don't want to keep updating this test
    #assert int(Docker.run('grep -rl "://127.0.0.1:999/" /var/www/html/ | wc -l').stdout) >= 1
    #assert int(Docker.run('grep -rl "://pi.hole:999/" /var/www/html/ | wc -l').stdout) >= 1


@pytest.mark.parametrize('test_args,expected_error', [
@@ -70,7 +72,7 @@ def test_override_default_servers_with_DNS_EnvVars(Docker, Slow, args_env, expec
    function = Docker.run('. /bash_functions.sh ; eval `grep "^setup_dnsmasq " /start.sh`')
    assert expected_stdout in function.stdout
    expected_servers = 'server={}\n'.format(dns1) if dns2 == None else 'server={}\nserver={}\n'.format(dns1, dns2)
    Slow(lambda: expected_servers == Docker.run('grep "^server=" /etc/dnsmasq.d/01-pihole.conf').stdout)
    Slow(lambda: expected_servers == Docker.run('grep "^server=[^/]" /etc/dnsmasq.d/01-pihole.conf').stdout)


@pytest.mark.skipif(os.environ.get('TRAVIS') == 'true',
@@ -110,7 +112,7 @@ def test_DNS_Envs_are_secondary_to_setupvars(Docker, Slow, args_env, expected_st
    expected_servers = ['server={}'.format(dns1)]
    if dns2:
        expected_servers.append('server={}'.format(dns2))
    Slow(lambda: Docker.run('grep "^server=" /etc/dnsmasq.d/01-pihole.conf').stdout.strip().split('\n') == \
    Slow(lambda: Docker.run('grep "^server=[^/]" /etc/dnsmasq.d/01-pihole.conf').stdout.strip().split('\n') == \
        expected_servers)


@@ -197,5 +199,5 @@ def test_webPassword_pre_existing_trumps_all_envs(Docker, args_env, test_args):
def test_docker_checks_for_resolvconf_misconfiguration(Docker, args_dns, expected_stdout):
    ''' The container checks for misconfigured resolv.conf '''
    function = Docker.run('. /bash_functions.sh ; eval `grep docker_checks /start.sh`')
    print function.stdout
    print(function.stdout)
    assert expected_stdout in function.stdout
@@ -1,3 +1,4 @@

import pytest
import time
''' conftest.py provides the defaults through fixtures '''
@@ -44,10 +45,10 @@ def test_indecies_are_present(RunningPiHole):

def validate_curl(http_rc, expected_http_code, page_contents):
    if int(http_rc.rc) != 0 or int(http_rc.stdout) != expected_http_code:
        print 'CURL return code: {}'.format(http_rc.rc)
        print 'CURL stdout: {}'.format(http_rc.stdout)
        print 'CURL stderr:{}'.format(http_rc.stderr)
        print 'CURL file:\n{}\n'.format(page_contents.encode('utf-8'))
        print('CURL return code: {}'.format(http_rc.rc))
        print('CURL stdout: {}'.format(http_rc.stdout))
        print('CURL stderr:{}'.format(http_rc.stderr))
        print('CURL file:\n{}\n'.format(page_contents.encode('utf-8')))


@pytest.mark.parametrize('addr', [ 'localhost' ] )
7 tox.ini
@@ -1,5 +1,5 @@
[tox]
envlist = py27
envlist = py38

[testenv]
whitelist_externals = docker
@@ -8,6 +8,7 @@ deps = -rrequirements.txt
commands = docker run --rm --privileged multiarch/qemu-user-static:register --reset
    ./Dockerfile.py -v --arch amd64
    pytest -vv -n auto -k amd64 ./test/
    ./Dockerfile.py -v --arch armhf --arch aarch64
    ./Dockerfile.py -v --arch armhf --arch arm64 --arch armel
    pytest -vv -n auto -k arm64 ./test/
    pytest -vv -n auto -k armhf ./test/
    pytest -vv -n auto -k aarch64 ./test/
    pytest -vv -n auto -k armel ./test/
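To iterate on a single architecture without running the whole matrix, the individual commands from `tox.ini` can be run directly inside the same environment, for example:

```bash
./Dockerfile.py -v --arch amd64       # template and build just the amd64 image
pytest -vv -n auto -k amd64 ./test/   # run only the amd64-tagged tests
```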