mirror of
https://github.com/cookiecutter/cookiecutter-django.git
synced 2025-03-03 19:08:15 +03:00
Added the awscli container for uploading and downloading backups from and to Amazon S3
This commit is contained in:
parent
3f756b7627
commit
8851ac13ef
|
@ -114,6 +114,7 @@ Listed in alphabetical order.
|
||||||
Irfan Ahmad `@erfaan`_ @erfaan
|
Irfan Ahmad `@erfaan`_ @erfaan
|
||||||
Jan Van Bruggen `@jvanbrug`_
|
Jan Van Bruggen `@jvanbrug`_
|
||||||
Jens Nilsson `@phiberjenz`_
|
Jens Nilsson `@phiberjenz`_
|
||||||
|
Jelmer Draaijer `@foarsitter`_
|
||||||
Jerome Leclanche `@jleclanche`_ @Adys
|
Jerome Leclanche `@jleclanche`_ @Adys
|
||||||
Jimmy Gitonga `@afrowave`_ @afrowave
|
Jimmy Gitonga `@afrowave`_ @afrowave
|
||||||
John Cass `@jcass77`_ @cass_john
|
John Cass `@jcass77`_ @cass_john
|
||||||
|
@ -231,6 +232,7 @@ Listed in alphabetical order.
|
||||||
.. _@eriol: https://github.com/eriol
|
.. _@eriol: https://github.com/eriol
|
||||||
.. _@eyadsibai: https://github.com/eyadsibai
|
.. _@eyadsibai: https://github.com/eyadsibai
|
||||||
.. _@flyudvik: https://github.com/flyudvik
|
.. _@flyudvik: https://github.com/flyudvik
|
||||||
|
.. _@foarsitter: https://github.com/foarsitter
|
||||||
.. _@garry-cairns: https://github.com/garry-cairns
|
.. _@garry-cairns: https://github.com/garry-cairns
|
||||||
.. _@garrypolley: https://github.com/garrypolley
|
.. _@garrypolley: https://github.com/garrypolley
|
||||||
.. _@goldhand: https://github.com/goldhand
|
.. _@goldhand: https://github.com/goldhand
|
||||||
|
|
|
@ -85,3 +85,11 @@ You will see something like ::
|
||||||
# ...
|
# ...
|
||||||
ALTER TABLE
|
ALTER TABLE
|
||||||
SUCCESS: The 'my_project' database has been restored from the '/backups/backup_2018_03_13T09_05_07.sql.gz' backup.
|
SUCCESS: The 'my_project' database has been restored from the '/backups/backup_2018_03_13T09_05_07.sql.gz' backup.
|
||||||
|
|
||||||
|
|
||||||
|
Backup to Amazon S3
|
||||||
|
----------------------------------
|
||||||
|
For uploading your backups to Amazon S3 you can use the aws cli container. There is an upload command for uploading the postgres /backups directory recursively, and a download command for downloading a specific backup. The default S3 environment variables are used. ::
|
||||||
|
|
||||||
|
$ docker-compose -f production.yml run --rm awscli upload
|
||||||
|
$ docker-compose -f production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
|
||||||
|
|
|
@ -0,0 +1,9 @@
|
||||||
|
# awscli helper container: bundles the maintenance scripts used to
# upload/download postgres backups to/from Amazon S3.
FROM garland/aws-cli-docker:1.15.47

# Install the aws maintenance scripts together with the shared shell
# helpers (constants.sh / messages.sh) that they `source` at runtime.
COPY ./compose/production/aws/maintenance /usr/local/bin/maintenance
COPY ./compose/production/postgres/maintenance/_sourced /usr/local/bin/maintenance/_sourced

# Make the scripts executable, promote them (and _sourced/) onto the PATH,
# then drop the now-empty staging directory. Done in a single RUN so the
# image carries one layer instead of three.
RUN chmod +x /usr/local/bin/maintenance/* \
    && mv /usr/local/bin/maintenance/* /usr/local/bin \
    && rmdir /usr/local/bin/maintenance
|
|
@ -0,0 +1,24 @@
|
||||||
|
#!/bin/sh

### Download a file from your Amazon S3 bucket to the postgres /backups folder
###
### Usage:
###     $ docker-compose -f production.yml run --rm awscli download <backup-filename>

set -o errexit
set -o pipefail
set -o nounset

working_dir="$(dirname "${0}")"
# Shared helpers installed next to this script by the awscli Dockerfile:
# constants.sh defines BACKUP_DIR_PATH; messages.sh defines message_* helpers.
source "${working_dir}/_sourced/constants.sh"
source "${working_dir}/_sourced/messages.sh"

# Fail early with a readable message instead of nounset's terse error.
if [ -z "${1:-}" ]; then
    printf '%s\n' "Usage: $ docker-compose -f production.yml run --rm awscli download <backup-filename>" >&2
    exit 1
fi

# Map the Django-prefixed credentials onto the names the aws cli expects.
export AWS_ACCESS_KEY_ID="${DJANGO_AWS_ACCESS_KEY_ID}"
export AWS_SECRET_ACCESS_KEY="${DJANGO_AWS_SECRET_ACCESS_KEY}"
export AWS_STORAGE_BUCKET_NAME="${DJANGO_AWS_STORAGE_BUCKET_NAME}"

# Quote both paths: a filename with unusual characters must stay one word.
aws s3 cp "s3://${AWS_STORAGE_BUCKET_NAME}${BACKUP_DIR_PATH}/${1}" "${BACKUP_DIR_PATH}/${1}"

message_success "Finished downloading ${1}."
|
||||||
|
|
|
@ -0,0 +1,30 @@
|
||||||
|
#!/bin/sh

### Upload the /backups folder to Amazon S3
###
### Usage:
###     $ docker-compose -f production.yml run --rm awscli upload

set -o errexit
set -o pipefail
set -o nounset

working_dir="$(dirname "${0}")"
# Shared helpers installed next to this script by the awscli Dockerfile:
# constants.sh defines BACKUP_DIR_PATH; messages.sh defines message_* helpers.
source "${working_dir}/_sourced/constants.sh"
source "${working_dir}/_sourced/messages.sh"

# Map the Django-prefixed credentials onto the names the aws cli expects.
export AWS_ACCESS_KEY_ID="${DJANGO_AWS_ACCESS_KEY_ID}"
export AWS_SECRET_ACCESS_KEY="${DJANGO_AWS_SECRET_ACCESS_KEY}"
export AWS_STORAGE_BUCKET_NAME="${DJANGO_AWS_STORAGE_BUCKET_NAME}"

# Fixed: original wrote {$AWS_STORAGE_BUCKET_NAME}, which printed literal braces.
message_info "Upload the backups directory to S3 bucket ${AWS_STORAGE_BUCKET_NAME}"

aws s3 cp "${BACKUP_DIR_PATH}" "s3://${AWS_STORAGE_BUCKET_NAME}${BACKUP_DIR_PATH}" --recursive

message_info "Cleaning the directory ${BACKUP_DIR_PATH}"

# ':?' aborts if BACKUP_DIR_PATH is empty, preventing an accidental 'rm -rf /*'.
rm -rf "${BACKUP_DIR_PATH:?}"/*

message_success "Finished uploading and cleaning."
|
||||||
|
|
|
@ -65,3 +65,11 @@ services:
|
||||||
command: /start-flower
|
command: /start-flower
|
||||||
|
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
  # One-shot helper service for S3 backup transfers; run on demand with
  # `docker-compose -f production.yml run --rm awscli upload|download <file>`.
  awscli:
    build:
      context: .
      dockerfile: ./compose/production/aws/Dockerfile
    env_file:
      # Supplies the DJANGO_AWS_* credentials the maintenance scripts export.
      - ./.envs/.production/.django
    volumes:
      # Same named volume the postgres service writes its backups into.
      - production_postgres_data_backups:/backups
|
||||||
|
|
Loading…
Reference in New Issue
Block a user