Merge pull request #1853 from foarsitter/aws_s3_backup_commands

Amazon S3 backup management commands
Bruno Alla 2019-04-03 18:23:58 +01:00 committed by GitHub
commit ed75c5df3a
5 changed files with 79 additions and 0 deletions


@@ -85,3 +85,11 @@ You will see something like ::
# ...
ALTER TABLE
SUCCESS: The 'my_project' database has been restored from the '/backups/backup_2018_03_13T09_05_07.sql.gz' backup.

Backup to Amazon S3
-------------------

To upload your backups to Amazon S3, you can use the aws cli container. It provides an upload command that uploads the postgres /backups directory recursively, and a download command that fetches a specific backup. Both commands authenticate with the project's default S3 environment variables. ::

    $ docker-compose -f production.yml run --rm awscli upload
    $ docker-compose -f production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
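For reference, the maintenance scripts further down read the credentials and bucket name from the Django-prefixed S3 variables, so the production Django env file passed to the awscli service (./.envs/.production/.django) is assumed to already contain entries along these lines (values are placeholders):

    # Assumed entries; the scripts re-export these as AWS_* before calling the aws CLI.
    DJANGO_AWS_ACCESS_KEY_ID=<your-access-key-id>
    DJANGO_AWS_SECRET_ACCESS_KEY=<your-secret-access-key>
    DJANGO_AWS_STORAGE_BUCKET_NAME=<your-bucket-name>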


@@ -0,0 +1,9 @@
FROM garland/aws-cli-docker:1.15.47
COPY ./compose/production/aws/maintenance /usr/local/bin/maintenance
COPY ./compose/production/postgres/maintenance/_sourced /usr/local/bin/maintenance/_sourced
RUN chmod +x /usr/local/bin/maintenance/*
RUN mv /usr/local/bin/maintenance/* /usr/local/bin \
&& rmdir /usr/local/bin/maintenance
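Because the scripts are moved into /usr/local/bin, they can be invoked by name (upload, download) as the container command. As a quick sanity check, the image can be built on its own before running any of the maintenance commands; a minimal sketch, assuming the awscli service is wired into production.yml as shown at the bottom of this diff:

    $ docker-compose -f production.yml build awscli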


@@ -0,0 +1,24 @@
#!/bin/sh
### Download a file from your Amazon S3 bucket to the postgres /backups folder
###
### Usage:
### $ docker-compose -f production.yml run --rm awscli download <1>
set -o errexit
set -o pipefail
set -o nounset
working_dir="$(dirname "${0}")"
source "${working_dir}/_sourced/constants.sh"
source "${working_dir}/_sourced/messages.sh"
export AWS_ACCESS_KEY_ID="${DJANGO_AWS_ACCESS_KEY_ID}"
export AWS_SECRET_ACCESS_KEY="${DJANGO_AWS_SECRET_ACCESS_KEY}"
export AWS_STORAGE_BUCKET_NAME="${DJANGO_AWS_STORAGE_BUCKET_NAME}"
aws s3 cp "s3://${AWS_STORAGE_BUCKET_NAME}${BACKUP_DIR_PATH}/${1}" "${BACKUP_DIR_PATH}/${1}"
message_success "Finished downloading ${1}."
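The script leans on helpers shared with the postgres maintenance scripts (copied into the image from compose/production/postgres/maintenance/_sourced/). A minimal sketch of what the sourced files are assumed to provide, purely so the commands above read on their own; the stand-in values and bodies below are illustrative, not the real definitions:

    # _sourced/constants.sh (assumed): the backups directory inside the container,
    # matching the volume mounted at /backups in production.yml
    BACKUP_DIR_PATH='/backups'

    # _sourced/messages.sh (assumed): thin wrappers around printf for status output
    message_info()    { printf "[INFO] %s\n" "${*}"; }
    message_success() { printf "[SUCCESS] %s\n" "${*}"; }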


@@ -0,0 +1,30 @@
#!/bin/sh
### Upload the /backups folder to Amazon S3
###
### Usage:
### $ docker-compose -f production.yml run --rm awscli upload
set -o errexit
set -o pipefail
set -o nounset
working_dir="$(dirname "${0}")"
source "${working_dir}/_sourced/constants.sh"
source "${working_dir}/_sourced/messages.sh"
export AWS_ACCESS_KEY_ID="${DJANGO_AWS_ACCESS_KEY_ID}"
export AWS_SECRET_ACCESS_KEY="${DJANGO_AWS_SECRET_ACCESS_KEY}"
export AWS_STORAGE_BUCKET_NAME="${DJANGO_AWS_STORAGE_BUCKET_NAME}"
message_info "Upload the backups directory to S3 bucket {$AWS_STORAGE_BUCKET_NAME}"
aws s3 cp "${BACKUP_DIR_PATH}" "s3://${AWS_STORAGE_BUCKET_NAME}${BACKUP_DIR_PATH}" --recursive
message_info "Cleaning the directory ${BACKUP_DIR_PATH}"
rm -rf "${BACKUP_DIR_PATH}"/*
message_success "Finished uploading and cleaning."


@@ -65,3 +65,11 @@ services:
    command: /start-flower
{%- endif %}
  awscli:
    build:
      context: .
      dockerfile: ./compose/production/aws/Dockerfile
    env_file:
      - ./.envs/.production/.django
    volumes:
      - production_postgres_data_backups:/backups
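Because the awscli service mounts the same production_postgres_data_backups volume as the postgres service, a downloaded backup lands exactly where the restore command expects it. A hedged example, reusing the filename from the docs excerpt above; the restore invocation is inferred from that excerpt rather than shown in this diff:

    # Pull a specific backup from S3 into /backups, then restore it
    # (restore is an existing postgres maintenance command, assumed here).
    $ docker-compose -f production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
    $ docker-compose -f production.yml exec postgres restore backup_2018_03_13T09_05_07.sql.gz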