Added the awscli container for uploading backups to and downloading them from Amazon S3

Jelmert 2018-11-08 11:14:58 +01:00
parent 3f756b7627
commit 8851ac13ef
6 changed files with 81 additions and 0 deletions

View File

@@ -114,6 +114,7 @@ Listed in alphabetical order.
Irfan Ahmad `@erfaan`_ @erfaan
Jan Van Bruggen `@jvanbrug`_
Jelmer Draaijer `@foarsitter`_
Jens Nilsson `@phiberjenz`_
Jerome Leclanche `@jleclanche`_ @Adys
Jimmy Gitonga `@afrowave`_ @afrowave
John Cass `@jcass77`_ @cass_john
@@ -231,6 +232,7 @@ Listed in alphabetical order.
.. _@eriol: https://github.com/eriol
.. _@eyadsibai: https://github.com/eyadsibai
.. _@flyudvik: https://github.com/flyudvik
.. _@foarsitter: https://github.com/foarsitter
.. _@garry-cairns: https://github.com/garry-cairns
.. _@garrypolley: https://github.com/garrypolley
.. _@goldhand: https://github.com/goldhand

View File

@@ -85,3 +85,11 @@ You will see something like ::
# ...
ALTER TABLE
SUCCESS: The 'my_project' database has been restored from the '/backups/backup_2018_03_13T09_05_07.sql.gz' backup.
Backup to Amazon S3
-------------------

For uploading your backups to Amazon S3 you can use the ``awscli`` container. It provides an ``upload`` command that uploads the postgres ``/backups`` directory recursively, and a ``download`` command that fetches a specific backup. The standard S3 credentials are taken from their ``DJANGO_AWS_*`` environment variable counterparts. ::

    $ docker-compose -f production.yml run --rm awscli upload
    $ docker-compose -f production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
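
The three ``DJANGO_AWS_*`` variables these commands rely on come from the Django production env file the container loads via ``env_file`` (``./.envs/.production/.django``); a minimal sketch with placeholder values::

    DJANGO_AWS_ACCESS_KEY_ID=<your-access-key-id>
    DJANGO_AWS_SECRET_ACCESS_KEY=<your-secret-access-key>
    DJANGO_AWS_STORAGE_BUCKET_NAME=<your-bucket-name>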

View File

@@ -0,0 +1,9 @@
FROM garland/aws-cli-docker:1.15.47

# Install the AWS maintenance scripts together with the helper snippets
# shared with the postgres maintenance scripts.
COPY ./compose/production/aws/maintenance /usr/local/bin/maintenance
COPY ./compose/production/postgres/maintenance/_sourced /usr/local/bin/maintenance/_sourced

# Make the scripts executable and move them onto the PATH.
RUN chmod +x /usr/local/bin/maintenance/* \
    && mv /usr/local/bin/maintenance/* /usr/local/bin \
    && rmdir /usr/local/bin/maintenance

View File

@@ -0,0 +1,24 @@
#!/bin/sh

### Download a file from your Amazon S3 bucket to the postgres /backups folder
###
### Usage:
###     $ docker-compose -f production.yml run --rm awscli download <backup_filename>

set -o errexit
set -o pipefail
set -o nounset

working_dir="$(dirname "${0}")"
source "${working_dir}/_sourced/constants.sh"
source "${working_dir}/_sourced/messages.sh"

# The aws CLI reads the standard AWS variables; map them from the Django settings.
export AWS_ACCESS_KEY_ID="${DJANGO_AWS_ACCESS_KEY_ID}"
export AWS_SECRET_ACCESS_KEY="${DJANGO_AWS_SECRET_ACCESS_KEY}"
export AWS_STORAGE_BUCKET_NAME="${DJANGO_AWS_STORAGE_BUCKET_NAME}"

# Copy the requested backup from the bucket into the local /backups volume.
aws s3 cp "s3://${AWS_STORAGE_BUCKET_NAME}${BACKUP_DIR_PATH}/${1}" "${BACKUP_DIR_PATH}/${1}"

message_success "Finished downloading ${1}."

View File

@@ -0,0 +1,30 @@
#!/bin/sh

### Upload the /backups folder to Amazon S3
###
### Usage:
###     $ docker-compose -f production.yml run --rm awscli upload

set -o errexit
set -o pipefail
set -o nounset

working_dir="$(dirname "${0}")"
source "${working_dir}/_sourced/constants.sh"
source "${working_dir}/_sourced/messages.sh"

# The aws CLI reads the standard AWS variables; map them from the Django settings.
export AWS_ACCESS_KEY_ID="${DJANGO_AWS_ACCESS_KEY_ID}"
export AWS_SECRET_ACCESS_KEY="${DJANGO_AWS_SECRET_ACCESS_KEY}"
export AWS_STORAGE_BUCKET_NAME="${DJANGO_AWS_STORAGE_BUCKET_NAME}"

message_info "Uploading the backups directory to the S3 bucket ${AWS_STORAGE_BUCKET_NAME}"
aws s3 cp "${BACKUP_DIR_PATH}" "s3://${AWS_STORAGE_BUCKET_NAME}${BACKUP_DIR_PATH}" --recursive

# Remove the local copies only after they are safely in the bucket.
message_info "Cleaning the directory ${BACKUP_DIR_PATH}"
rm -rf "${BACKUP_DIR_PATH:?}"/*

message_success "Finished uploading and cleaning."

View File

@@ -65,3 +65,11 @@ services:
    command: /start-flower
{%- endif %}

  awscli:
    build:
      context: .
      dockerfile: ./compose/production/aws/Dockerfile
    env_file:
      - ./.envs/.production/.django
    volumes:
      - production_postgres_data_backups:/backups
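
With this service defined, the image can be built alongside the rest of the stack and the maintenance commands run as one-off containers; a usage sketch (``run`` also builds the image on first use)::

    $ docker-compose -f production.yml build awscli
    $ docker-compose -f production.yml run --rm awscli upload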