Skip to content
New issue

Have a question about this project? Sign up for a free GitHub account to open an issue and contact its maintainers and the community.

By clicking “Sign up for GitHub”, you agree to our terms of service and privacy statement. We’ll occasionally send you account-related emails.

Already on GitHub? Sign in to your account

feat(postgres): use docker's official postgres:11 image #4

Merged
merged 4 commits into from
Nov 14, 2018
Merged
Show file tree
Hide file tree
Changes from all commits
Commits
File filter

Filter by extension

Filter by extension

Conversations
Failed to load comments.
Loading
Jump to
Jump to file
Failed to load files.
Loading
Diff view
Diff view
2 changes: 1 addition & 1 deletion .travis.yml
Original file line number Diff line number Diff line change
Expand Up @@ -2,7 +2,7 @@ sudo: required
language: go
services:
- docker
script: make test
script: make docker-build && make test
notifications:
slack:
secure: pWDCV3od8gxvzxh9DrOTvBL54XoCfWYhZZlwd2ZbyyOz6SS12Psg/ZuCT2253p4yMfF/LPlsz76mr7NgcCrMI0ReveTa/rnt3XBZtyY+1rlsQsy2oxgdAzbO587ENCQeMw2F/OWHaixMT8NDqxEqQd6xafK9Zmg6BeBjwgs7XfXKcR3WzNIuCO0ZG05+Yd0FIxmd/8Xm5tGiFEYr05+Ix6MLdF9MSCXZUPeu1EsYXhDljokLq49w63W1UMU10tm4t7VCEdaO+X9w6EJ5Ov8HDxb6L6IviUYY6+IGTZ01nwIoM6OrGQqfEAytYqgTKdehgQzQnAbLI6TW2wJ0twqEsLrlbTa4NW4j0KkazQJkN5kqcKYQvaeKJJhvJIG44Gi/u78pW3S6W7NU5DhrlE6bbxdIBHJW1vJBimkqu2oBNrO5ZoBB9MS9zflBsU5g/pQpVeHWMnWE8fcYDGa1PqAcr7q6wtdPsrVZhnHmmARN3PwZzIVVVsXbaIQG8VLC5grLGnwMf1Y1fz2nK3sVpCftvrYZT3G6CNAASo+eLOwYdZdiJ9jIS7WNLN1GtpIEvjeDt3QRqsDyH8YoAKUvY5h/v8IWPP/BaSwQbJwep4+Dj7xkpXX5/4wm4jEnVFV1p4xE0lD1AXvEMAVHtPhhggvscNhF9j6oeoPju6eTPcxG+5o=
86 changes: 27 additions & 59 deletions Dockerfile
Original file line number Diff line number Diff line change
@@ -1,79 +1,47 @@
FROM quay.io/deis/base:v0.3.6
FROM postgres:11

ENV LANG=en_US.utf8 \
PG_MAJOR=9.4 \
PG_VERSION=9.4.17-1.pgdg16.04+1 \
PGDATA=/var/lib/postgresql/data
ARG DEBIAN_FRONTEND=noninteractive
ARG BUILD_DEPS='gcc git libffi-dev libssl-dev python3-dev python3-pip python3-wheel'

# Set this separately from those above since it depends on one of them
ENV PATH=/usr/lib/postgresql/$PG_MAJOR/bin:$PATH

# Add postgres user and group
RUN adduser --system \
--shell /bin/bash \
--disabled-password \
--group \
postgres

RUN buildDeps='gcc git libffi-dev libssl-dev python3-dev python3-pip python3-wheel' && \
localedef -i en_US -c -f UTF-8 -A /etc/locale.alias en_US.UTF-8 && \
export DEBIAN_FRONTEND=noninteractive && \
apt-key adv --keyserver ha.pool.sks-keyservers.net --recv-keys B97B0AFCAA1A47F044F244A07FCC7D46ACCC4CF8 && \
echo 'deb http://apt.postgresql.org/pub/repos/apt/ xenial-pgdg main' $PG_MAJOR > /etc/apt/sources.list.d/pgdg.list && \
apt-get update && \
RUN apt-get update && \
apt-get install -y --no-install-recommends \
$buildDeps \
$BUILD_DEPS \
gosu \
lzop \
postgresql-$PG_MAJOR=$PG_VERSION \
postgresql-contrib-$PG_MAJOR=$PG_VERSION \
libpq-dev \
pv \
python3 \
postgresql-common \
util-linux \
# swift package needs pkg_resources and setuptools
python3-pkg-resources \
python3-setuptools && \
python3-setuptools \
python3-pip && \
ln -sf /usr/bin/python3 /usr/bin/python && \
ln -sf /usr/bin/pip3 /usr/bin/pip && \
mkdir -p /run/postgresql && \
chown -R postgres /run/postgresql && \
# setuptools from ubuntu archives is too old for googleapis-common-protos
pip install --upgrade setuptools && \
ln -sf /usr/bin/pip3 /usr/bin/pip

# setuptools from ubuntu archives is too old for googleapis-common-protos
RUN pip install --upgrade setuptools && \
pip install --disable-pip-version-check --no-cache-dir \
envdir==0.7 \
wal-e[aws,azure,google,swift]==v1.0.2 \
# pin azure-storage to version wal-e uses (see docker-entrypoint.sh)
azure-storage==0.20.0 && \
# "upgrade" boto to 2.43.0 + the patch to fix minio connections
pip install --disable-pip-version-check --no-cache-dir --upgrade git+https://github.com/teamhephy/boto@88c980e56d1053892eb940d43a15a68af4ebb5e6 && \
# cleanup
apt-get purge -y --auto-remove $buildDeps && \
envdir==1.0.1 \
wal-e[aws,azure,google,swift]==1.1.0 \
gcloud==0.18.3 \
oauth2client==4.1.3 \
azure-storage==0.20.0

# cleanup
RUN apt-get purge -y --auto-remove $BUILD_DEPS && \
apt-get autoremove -y && \
apt-get clean -y && \
# package up license files if any by appending to existing tar
COPYRIGHT_TAR='/usr/share/copyrights.tar' && \
gunzip -f $COPYRIGHT_TAR.gz && \
tar -rf $COPYRIGHT_TAR /usr/share/doc/*/copyright && \
gzip $COPYRIGHT_TAR && \
rm -rf \
/usr/share/doc \
/usr/share/man \
/usr/share/info \
/usr/share/locale \
/var/lib/apt/lists/* \
/var/log/* \
/var/cache/debconf/* \
/etc/systemd \
/lib/lsb \
/lib/udev \
/usr/lib/x86_64-linux-gnu/gconv/IBM* \
/usr/lib/x86_64-linux-gnu/gconv/EBC* && \
bash -c "mkdir -p /usr/share/man/man{1..8}"
apt-get clean -y

COPY rootfs /
ENV WALE_ENVDIR=/etc/wal-e.d/env
RUN mkdir -p $WALE_ENVDIR

ARG PATCH_CMD="python3 /patcher-script.py"
RUN $PATCH_CMD file /bin/create_bucket /patcher-script.d/patch_boto_s3.py
RUN $PATCH_CMD file /usr/local/bin/wal-e /patcher-script.d/patch_boto_s3.py
RUN $PATCH_CMD module wal_e.worker.worker_util /patcher-script.d/patch_wal_e_s3.py


CMD ["/docker-entrypoint.sh", "postgres"]
EXPOSE 5432
10 changes: 6 additions & 4 deletions rootfs/bin/create_bucket
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ def bucket_exists(conn, name):
return True

bucket_name = os.getenv('BUCKET_NAME')
region = os.getenv('AWS_REGION')
region = os.getenv('S3_REGION')

if os.getenv('DATABASE_STORAGE') == "s3":
conn = boto.s3.connect_to_region(region)
Expand Down Expand Up @@ -76,15 +76,17 @@ elif os.getenv('DATABASE_STORAGE') == "swift":
conn.put_container(os.getenv('BUCKET_NAME'))

else:
botoconfig.add_section('s3')
if not botoconfig.has_section("s3"):
botoconfig.add_section('s3')
botoconfig.set('s3', 'use-sigv4', 'True')
botoconfig.add_section('Boto')
if not botoconfig.has_section("Boto"):
botoconfig.add_section('Boto')
botoconfig.set('Boto', 'is_secure', 'False')
conn = S3Connection(
host=os.getenv('S3_HOST'),
port=int(os.getenv('S3_PORT')),
calling_format=OrdinaryCallingFormat())
# HACK(bacongobbler): allow boto to connect to minio by changing the region name for s3v4 auth
conn.auth_region_name = os.getenv('AWS_REGION')
conn.auth_region_name = os.getenv('S3_REGION')
if not bucket_exists(conn, bucket_name):
conn.create_bucket(bucket_name)
2 changes: 1 addition & 1 deletion rootfs/docker-entrypoint-initdb.d/001_setup_envdir.sh
Original file line number Diff line number Diff line change
Expand Up @@ -36,7 +36,7 @@ if [[ "$DATABASE_STORAGE" == "s3" || "$DATABASE_STORAGE" == "minio" ]]; then
else
echo "1" > AWS_INSTANCE_PROFILE
fi
echo $AWS_REGION > AWS_REGION
echo $AWS_REGION > S3_REGION
echo $BUCKET_NAME > BUCKET_NAME
elif [ "$DATABASE_STORAGE" == "gcs" ]; then
GOOGLE_APPLICATION_CREDENTIALS="/var/run/secrets/deis/objectstore/creds/key.json"
Expand Down
8 changes: 7 additions & 1 deletion rootfs/docker-entrypoint-initdb.d/003_restore_from_backup.sh
Original file line number Diff line number Diff line change
Expand Up @@ -20,7 +20,7 @@ gosu postgres pg_ctl -D "$PGDATA" -w restart
if [[ $(envdir "$WALE_ENVDIR" wal-e --terse backup-list | wc -l) -gt "1" ]]; then
echo "Found backups. Restoring from backup..."
gosu postgres pg_ctl -D "$PGDATA" -w stop
rm -rf "$PGDATA"
rm -rf "$PGDATA/*"
envdir "$WALE_ENVDIR" wal-e backup-fetch "$PGDATA" LATEST
cat << EOF > "$PGDATA/postgresql.conf"
# These settings are initialized by initdb, but they can be changed.
Expand Down Expand Up @@ -53,6 +53,12 @@ EOF
gosu postgres pg_ctl -D "$PGDATA" \
-o "-c listen_addresses=''" \
-w start

echo "Waiting for recovery completion..."
while [ ! -f "$PGDATA/recovery.done" ]
do
sleep 2
done
fi

echo "Performing an initial backup..."
Expand Down
9 changes: 9 additions & 0 deletions rootfs/patcher-script.d/patch_boto_s3.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,9 @@
def patch_boto_s3_hmac_auth_v4_handler():
    """Monkey-patch boto's HmacAuthV4Handler so that the sigv4 signing
    region can be overridden through the S3_REGION environment variable.

    Boto's own __init__ still runs first; only region_name is adjusted
    afterwards, and only when S3_REGION is set.
    """
    import os
    from boto.auth import HmacAuthV4Handler

    original_init = HmacAuthV4Handler.__init__

    def patched_init(self, *args, **kwargs):
        original_init(self, *args, **kwargs)
        # Fall back to whatever boto computed when S3_REGION is unset.
        self.region_name = os.getenv('S3_REGION', self.region_name)

    HmacAuthV4Handler.__init__ = patched_init


patch_boto_s3_hmac_auth_v4_handler()
18 changes: 18 additions & 0 deletions rootfs/patcher-script.d/patch_wal_e_s3.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,18 @@
def patch_uri_put_file():
    """Replace wal-e's S3 ``uri_put_file`` with a wrapper that requests
    server-side encryption only when DATABASE_STORAGE is 's3' (real AWS),
    since S3-compatible stores such as minio may reject ``encrypt_key``.
    """
    import os
    from wal_e.blobstore import s3
    from wal_e.blobstore.s3 import s3_util

    def uri_put_file_with_encryption(creds, uri, fp, content_type=None, conn=None):
        # wal-e always hands over a stream positioned at the start.
        assert fp.tell() == 0
        key = s3_util._uri_to_key(creds, uri, conn=conn)
        if content_type is not None:
            key.content_type = content_type
        use_encryption = os.getenv('DATABASE_STORAGE') == 's3'
        key.set_contents_from_file(fp, encrypt_key=use_encryption)
        return key

    # Patch both import paths so every caller sees the wrapper.
    s3.uri_put_file = uri_put_file_with_encryption
    s3_util.uri_put_file = uri_put_file_with_encryption


patch_uri_put_file()
41 changes: 41 additions & 0 deletions rootfs/patcher-script.py
Original file line number Diff line number Diff line change
@@ -0,0 +1,41 @@
"""Insert a patch-loader snippet into a Python source file.

Usage:
    patcher-script.py file <path> <patch_script>
    patcher-script.py module <dotted.module.name> <patch_script>

The snippet is inserted immediately before the target's first
``import``/``from`` statement (or appended at the end when the target
contains no imports). When the patched file later runs, the snippet
executes the given patch script on a best-effort basis.
"""
import importlib
import sys

# Template for the injected snippet. %r expands the patch script's path
# to a properly quoted/escaped Python string literal, so paths containing
# quotes or backslashes cannot break the generated code.
patch_script_tmp = """

def run_patch_scripts(patch_script_path):
    with open(patch_script_path, 'r') as f:
        try:
            exec(f.read())
        except Exception:
            # Patching is best-effort: never break the host program.
            pass
run_patch_scripts(%r)

"""


def main(patch_file, patch_script_file):
    """Rewrite *patch_file* in place, injecting the loader snippet for
    *patch_script_file* before its first import statement (or at the end
    when the file has no imports)."""
    patch_script = patch_script_tmp % patch_script_file
    result_list = []
    has_patched = False
    with open(patch_file, "r") as f:
        for line in f:
            if not has_patched and (line.startswith('import')
                                    or line.startswith('from')):
                result_list.append(patch_script)
                has_patched = True
            result_list.append(line)
    if not has_patched:
        result_list.append(patch_script)
    with open(patch_file, "w") as f:
        f.writelines(result_list)


if __name__ == '__main__':
    patch_type = sys.argv[1]
    if patch_type == 'file':
        patch_file = sys.argv[2]
    elif patch_type == 'module':
        # Resolve a dotted module name to the path of its source file.
        patch_file = importlib.import_module(sys.argv[2]).__file__
    patch_script_file = sys.argv[3]
    main(patch_file, patch_script_file)