mirror of https://github.com/cookiecutter/cookiecutter-django.git

commit 39df72669e

Merge branch 'master' into upgrade/django-2.1

# Conflicts:
#   README.rst

@@ -177,6 +177,7 @@ Listed in alphabetical order.
   Travis McNeill      `@Travistock`_     @tavistock_esq
   Tubo Shi            `@Tubo`_
   Umair Ashraf        `@umrashrf`_       @fabumair
+  Vlad Doster         `@vladdoster`_
   Vitaly Babiy
   Vivian Guillen      `@viviangb`_
   Will Farley         `@goldhand`_       @g01dhand

@@ -315,6 +316,7 @@ Listed in alphabetical order.
 .. _@mrcoles: https://github.com/mrcoles
 .. _@ericgroom: https://github.com/ericgroom
 .. _@hanaquadara: https://github.com/hanaquadara
+.. _@vladdoster: https://github.com/vladdoster
 
 Special Thanks
 ~~~~~~~~~~~~~~

README.rst

@@ -47,7 +47,7 @@ Features
 * Comes with custom user model ready to go
 * Optional custom static build using Gulp and livereload
 * Send emails via Anymail_ (using Mailgun_ by default, but switchable)
-* Media storage using Amazon S3
+* Media storage using Amazon S3 or Google Cloud Storage
 * Docker support using docker-compose_ for development and production (using Traefik_ with LetsEncrypt_ support)
 * Procfile_ for deploying to Heroku
 * Instructions for deploying to PythonAnywhere_

@@ -62,7 +62,7 @@ Optional Integrations
 
 *These features can be enabled during initial project setup.*
 
-* Serve static files from Amazon S3 or Whitenoise_
+* Serve static files from Amazon S3, Google Cloud Storage or Whitenoise_
 * Configuration for Celery_ and Flower_ (the latter in Docker setup only)
 * Integration with MailHog_ for local email testing
 * Integration with Sentry_ for error logging

@@ -155,7 +155,7 @@ Answer the prompts with your own desired options_. For example::
     project_slug [reddit_clone]: reddit
     author_name [Daniel Roy Greenfeld]: Daniel Greenfeld
     email [you@example.com]: pydanny@gmail.com
-    description [A short description of the project.]: A reddit clone.
+    description [Behold My Awesome Project!]: A reddit clone.
     domain_name [example.com]: myreddit.com
     version [0.1.0]: 0.0.1
     timezone [UTC]: America/Los_Angeles

@@ -182,6 +182,10 @@ Answer the prompts with your own desired options_. For example::
     1 - None
     2 - Gulp
     Choose from 1, 2 [1]: 1
+    Select cloud_provider:
+    1 - AWS
+    2 - GCS
+    Choose from 1, 2 [1]: 1
     custom_bootstrap_compilation [n]: n
     Select open_source_license:
     1 - MIT

cookiecutter.json

@@ -31,6 +31,10 @@
         "None",
         "Gulp"
     ],
+    "cloud_provider": [
+        "AWS",
+        "GCE"
+    ],
    "custom_bootstrap_compilation": "n",
    "use_compressor": "n",
    "use_celery": "n",
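
Note (not part of this commit): the new choice can also be supplied non-interactively through the cookiecutter Python API. A minimal sketch, using the repository URL from the page header; note that the README prompt labels the second choice ``GCS`` while cookiecutter.json stores the value as ``GCE``, so the value passed programmatically is ``GCE``::

    # Sketch: bake a project with Google Cloud storage selected, skipping the prompts.
    from cookiecutter.main import cookiecutter

    cookiecutter(
        "gh:cookiecutter/cookiecutter-django",
        no_input=True,
        extra_context={"project_slug": "reddit", "cloud_provider": "GCE"},
    )
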

docs/docker-postgres-backups.rst

@@ -85,3 +85,11 @@ You will see something like ::
     # ...
     ALTER TABLE
     SUCCESS: The 'my_project' database has been restored from the '/backups/backup_2018_03_13T09_05_07.sql.gz' backup.
+
+
+Backup to Amazon S3
+----------------------------------
+For uploading your backups to Amazon S3 you can use the aws cli container. There is an upload command for uploading the postgres /backups directory recursively and there is a download command for downloading a specific backup. The default S3 environment variables are used. ::
+
+    $ docker-compose -f production.yml run --rm awscli upload
+    $ docker-compose -f production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
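
Note (not part of this commit): the same backups can also be inspected directly with boto3, reusing the DJANGO_AWS_* variables defined elsewhere in this diff; the ``backups/`` key prefix is assumed from the upload script further down. A rough sketch::

    import os

    import boto3  # third-party client, not pinned by the template

    session = boto3.session.Session(
        aws_access_key_id=os.environ["DJANGO_AWS_ACCESS_KEY_ID"],
        aws_secret_access_key=os.environ["DJANGO_AWS_SECRET_ACCESS_KEY"],
    )
    s3 = session.client("s3")
    bucket = os.environ["DJANGO_AWS_STORAGE_BUCKET_NAME"]

    # List the backups uploaded by the awscli container.
    for obj in s3.list_objects_v2(Bucket=bucket, Prefix="backups/").get("Contents", []):
        print(obj["Key"], obj["Size"])

    # Fetch one backup locally, mirroring the `awscli download` command above.
    s3.download_file(bucket, "backups/backup_2018_03_13T09_05_07.sql.gz", "backup.sql.gz")
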

docs/project-generation-options.rst

@@ -64,6 +64,12 @@ js_task_runner:
     1. None
     2. Gulp_
 
+cloud_provider:
+    Select a cloud provider for static & media files. The choices are:
+
+    1. AWS_
+    2. GCS_
+
 custom_bootstrap_compilation:
     Indicates whether the project should support Bootstrap recompilation
     via the selected JavaScript task runner's task. This can be useful

@@ -116,6 +122,9 @@ debug:
 
 .. _Gulp: https://github.com/gulpjs/gulp
 
+.. _AWS: https://aws.amazon.com/s3/
+.. _GCS: https://cloud.google.com/storage/
+
 .. _Django Compressor: https://github.com/django-compressor/django-compressor
 
 .. _Celery: https://github.com/celery/celery

docs/settings.rst

@@ -45,6 +45,8 @@ DJANGO_AWS_ACCESS_KEY_ID         AWS_ACCESS_KEY_ID          n/a
 DJANGO_AWS_SECRET_ACCESS_KEY     AWS_SECRET_ACCESS_KEY      n/a      raises error
 DJANGO_AWS_STORAGE_BUCKET_NAME   AWS_STORAGE_BUCKET_NAME    n/a      raises error
 DJANGO_AWS_S3_REGION_NAME        AWS_S3_REGION_NAME         n/a      None
+DJANGO_GCE_STORAGE_BUCKET_NAME   GS_BUCKET_NAME             n/a      raises error
+GOOGLE_APPLICATION_CREDENTIALS   n/a                        n/a      raises error
 SENTRY_DSN                       SENTRY_DSN                 n/a      raises error
 DJANGO_SENTRY_LOG_LEVEL          SENTRY_LOG_LEVEL           n/a      logging.INFO
 MAILGUN_API_KEY                  MAILGUN_ACCESS_KEY         n/a      raises error
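
Note (not part of this commit): of the two new variables, only ``DJANGO_GCE_STORAGE_BUCKET_NAME`` is read explicitly by the Django settings (see the production.py hunks later in this diff); ``GOOGLE_APPLICATION_CREDENTIALS`` is picked up implicitly by the Google client libraries. A minimal sketch of how they are consumed::

    # Names follow this diff; the surrounding code is illustrative only.
    import environ
    from google.auth import default as google_auth_default  # pulled in via django-storages[google]

    env = environ.Env()

    # Raises ImproperlyConfigured when unset, i.e. the "raises error" column above.
    GS_BUCKET_NAME = env("DJANGO_GCE_STORAGE_BUCKET_NAME")

    # google-auth reads GOOGLE_APPLICATION_CREDENTIALS from the environment on its own.
    credentials, project_id = google_auth_default()
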

requirements.txt

@@ -9,8 +9,8 @@ flake8==3.7.6
 
 # Testing
 # ------------------------------------------------------------------------------
-tox==3.8.4
-pytest==4.4.0
-pytest_cases==1.5.1
+tox==3.8.6
+pytest==4.4.1
+pytest_cases==1.6.2
 pytest-cookies==0.3.0
 pyyaml==5.1

tests/test_cookiecutter_generation.py

@@ -11,6 +11,7 @@ PATTERN = "{{(\s?cookiecutter)[.](.*?)}}"
 RE_OBJ = re.compile(PATTERN)
 
 YN_CHOICES = ["y", "n"]
+CLOUD_CHOICES = ["AWS", "GCE"]
 
 
 @pytest.fixture

@@ -35,6 +36,7 @@ def context():
 @pytest.mark.parametrize("use_sentry", YN_CHOICES, ids=lambda yn: f"sentry:{yn}")
 @pytest.mark.parametrize("use_compressor", YN_CHOICES, ids=lambda yn: f"cmpr:{yn}")
 @pytest.mark.parametrize("use_whitenoise", YN_CHOICES, ids=lambda yn: f"wnoise:{yn}")
+@pytest.mark.parametrize("cloud_provider", CLOUD_CHOICES, ids=lambda yn: f"cloud:{yn}")
 def context_combination(
     windows,
     use_docker,

@@ -43,6 +45,7 @@ def context_combination(
     use_sentry,
     use_compressor,
     use_whitenoise,
+    cloud_provider,
 ):
     """Fixture that parametrize the function where it's used."""
     return {

@@ -53,6 +56,7 @@ def context_combination(
         "use_mailhog": use_mailhog,
         "use_sentry": use_sentry,
         "use_whitenoise": use_whitenoise,
+        "cloud_provider": cloud_provider,
     }
 
 
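
Note (not part of this commit): the ``context_combination`` fixture above is consumed by the generation tests through pytest-cookies' ``cookies`` fixture, roughly like this sketch::

    def test_project_generation(cookies, context, context_combination):
        """Sketch: bake the template once per parametrized combination."""
        result = cookies.bake(extra_context={**context, **context_combination})

        assert result.exit_code == 0
        assert result.exception is None
        assert result.project.isdir()
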

{{cookiecutter.project_slug}}/.envs/.production/.django

@@ -16,13 +16,18 @@ DJANGO_SECURE_SSL_REDIRECT=False
 MAILGUN_API_KEY=
 DJANGO_SERVER_EMAIL=
 MAILGUN_DOMAIN=
+{% if cookiecutter.cloud_provider == 'AWS' %}
 # AWS
 # ------------------------------------------------------------------------------
 DJANGO_AWS_ACCESS_KEY_ID=
 DJANGO_AWS_SECRET_ACCESS_KEY=
 DJANGO_AWS_STORAGE_BUCKET_NAME=
+{% elif cookiecutter.cloud_provider == 'GCE' %}
+# GCE
+# ------------------------------------------------------------------------------
+GOOGLE_APPLICATION_CREDENTIALS=
+DJANGO_GCE_STORAGE_BUCKET_NAME=
+{% endif %}
 # django-allauth
 # ------------------------------------------------------------------------------
 DJANGO_ACCOUNT_ALLOW_REGISTRATION=True
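
Note (not part of this commit): ``GOOGLE_APPLICATION_CREDENTIALS`` is expected to hold the path to a service-account JSON key reachable inside the container. A quick sanity-check sketch::

    import json
    import os

    # Fails loudly if the variable is unset or the mounted key file is missing.
    with open(os.environ["GOOGLE_APPLICATION_CREDENTIALS"]) as fh:
        print(json.load(fh)["client_email"])
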

{{cookiecutter.project_slug}}/compose/production/aws/Dockerfile (new file)

@@ -0,0 +1,9 @@
+FROM garland/aws-cli-docker:1.15.47
+
+COPY ./compose/production/aws/maintenance /usr/local/bin/maintenance
+COPY ./compose/production/postgres/maintenance/_sourced /usr/local/bin/maintenance/_sourced
+
+RUN chmod +x /usr/local/bin/maintenance/*
+
+RUN mv /usr/local/bin/maintenance/* /usr/local/bin \
+    && rmdir /usr/local/bin/maintenance

{{cookiecutter.project_slug}}/compose/production/aws/maintenance/download (new file)

@@ -0,0 +1,24 @@
+#!/bin/sh
+
+### Download a file from your Amazon S3 bucket to the postgres /backups folder
+###
+### Usage:
+###     $ docker-compose -f production.yml run --rm awscli <1>
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+working_dir="$(dirname ${0})"
+source "${working_dir}/_sourced/constants.sh"
+source "${working_dir}/_sourced/messages.sh"
+
+export AWS_ACCESS_KEY_ID="${DJANGO_AWS_ACCESS_KEY_ID}"
+export AWS_SECRET_ACCESS_KEY="${DJANGO_AWS_SECRET_ACCESS_KEY}"
+export AWS_STORAGE_BUCKET_NAME="${DJANGO_AWS_STORAGE_BUCKET_NAME}"
+
+
+aws s3 cp s3://${AWS_STORAGE_BUCKET_NAME}${BACKUP_DIR_PATH}/${1} ${BACKUP_DIR_PATH}/${1}
+
+message_success "Finished downloading ${1}."

{{cookiecutter.project_slug}}/compose/production/aws/maintenance/upload (new file)

@@ -0,0 +1,30 @@
+#!/bin/sh
+
+### Upload the /backups folder to Amazon S3
+###
+### Usage:
+###     $ docker-compose -f production.yml run --rm awscli upload
+
+set -o errexit
+set -o pipefail
+set -o nounset
+
+working_dir="$(dirname ${0})"
+source "${working_dir}/_sourced/constants.sh"
+source "${working_dir}/_sourced/messages.sh"
+
+export AWS_ACCESS_KEY_ID="${DJANGO_AWS_ACCESS_KEY_ID}"
+export AWS_SECRET_ACCESS_KEY="${DJANGO_AWS_SECRET_ACCESS_KEY}"
+export AWS_STORAGE_BUCKET_NAME="${DJANGO_AWS_STORAGE_BUCKET_NAME}"
+
+
+message_info "Upload the backups directory to S3 bucket {$AWS_STORAGE_BUCKET_NAME}"
+
+aws s3 cp ${BACKUP_DIR_PATH} s3://${AWS_STORAGE_BUCKET_NAME}${BACKUP_DIR_PATH} --recursive
+
+message_info "Cleaning the directory ${BACKUP_DIR_PATH}"
+
+rm -rf ${BACKUP_DIR_PATH}/*
+
+message_success "Finished uploading and cleaning."

{{cookiecutter.project_slug}}/config/settings/production.py

@@ -70,6 +70,7 @@ SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
 # ------------------------------------------------------------------------------
 # https://django-storages.readthedocs.io/en/latest/#installation
 INSTALLED_APPS += ["storages"]  # noqa F405
+{% if cookiecutter.cloud_provider == 'AWS' %}
 # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
 AWS_ACCESS_KEY_ID = env("DJANGO_AWS_ACCESS_KEY_ID")
 # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings

@@ -88,22 +89,27 @@ AWS_S3_OBJECT_PARAMETERS = {
 AWS_DEFAULT_ACL = None
 # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
 AWS_S3_REGION_NAME = env("DJANGO_AWS_S3_REGION_NAME", default=None)
+{% elif cookiecutter.cloud_provider == 'GCE' %}
+DEFAULT_FILE_STORAGE = "storages.backends.gcloud.GoogleCloudStorage"
+GS_BUCKET_NAME = env("DJANGO_GCE_STORAGE_BUCKET_NAME")
+GS_DEFAULT_ACL = "publicRead"
+{% endif %}
 
 # STATIC
 # ------------------------
 {% if cookiecutter.use_whitenoise == 'y' -%}
 STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
-{%- else %}
+{%- endif -%}
+{%- if cookiecutter.cloud_provider == 'AWS' %}
 STATICFILES_STORAGE = "config.settings.production.StaticRootS3Boto3Storage"
 STATIC_URL = f"https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/static/"
+{%- elif cookiecutter.cloud_provider == 'GCE' %}
+STATIC_URL = "https://storage.googleapis.com/{}/static".format(GS_BUCKET_NAME)
 {%- endif %}
 
 # MEDIA
 # ------------------------------------------------------------------------------
-{% if cookiecutter.use_whitenoise == 'y' -%}
-DEFAULT_FILE_STORAGE = "storages.backends.s3boto3.S3Boto3Storage"
-MEDIA_URL = f"https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/"
-{%- else %}
+{%- if cookiecutter.cloud_provider == 'AWS' %}
 # region http://stackoverflow.com/questions/10390244/
 # Full-fledge class: https://stackoverflow.com/a/18046120/104731
 from storages.backends.s3boto3 import S3Boto3Storage  # noqa E402

@@ -121,6 +127,9 @@ class MediaRootS3Boto3Storage(S3Boto3Storage):
 # endregion
 DEFAULT_FILE_STORAGE = "config.settings.production.MediaRootS3Boto3Storage"
 MEDIA_URL = f"https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/media/"
+{%- elif cookiecutter.cloud_provider == 'GCE' %}
+MEDIA_URL = "https://storage.googleapis.com/{}/media".format(GS_BUCKET_NAME)
+MEDIA_ROOT = "https://storage.googleapis.com/{}/media".format(GS_BUCKET_NAME)
 {%- endif %}
 
 # TEMPLATES
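
Note (not part of this commit): with ``cloud_provider`` set to ``GCE`` and ``use_whitenoise`` left at ``y``, the Jinja blocks in the two hunks above render to roughly the following storage settings::

    # Approximate rendered output for the GCE choice.
    INSTALLED_APPS += ["storages"]  # noqa F405
    DEFAULT_FILE_STORAGE = "storages.backends.gcloud.GoogleCloudStorage"
    GS_BUCKET_NAME = env("DJANGO_GCE_STORAGE_BUCKET_NAME")
    GS_DEFAULT_ACL = "publicRead"

    # STATIC
    STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
    STATIC_URL = "https://storage.googleapis.com/{}/static".format(GS_BUCKET_NAME)

    # MEDIA
    MEDIA_URL = "https://storage.googleapis.com/{}/media".format(GS_BUCKET_NAME)
    MEDIA_ROOT = "https://storage.googleapis.com/{}/media".format(GS_BUCKET_NAME)
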

{{cookiecutter.project_slug}}/production.yml

@@ -65,3 +65,11 @@ services:
     command: /start-flower
 
 {%- endif %}
+  awscli:
+    build:
+      context: .
+      dockerfile: ./compose/production/aws/Dockerfile
+    env_file:
+      - ./.envs/.production/.django
+    volumes:
+      - production_postgres_data_backups:/backups

{{cookiecutter.project_slug}}/requirements/base.txt

@@ -1,4 +1,4 @@
-pytz==2018.9 # https://github.com/stub42/pytz
+pytz==2019.1 # https://github.com/stub42/pytz
 python-slugify==3.0.2 # https://github.com/un33k/python-slugify
 Pillow==6.0.0 # https://github.com/python-pillow/Pillow
 {%- if cookiecutter.use_compressor == "y" %}

{{cookiecutter.project_slug}}/requirements/local.txt

@@ -1,18 +1,18 @@
 -r ./base.txt
 
-Werkzeug==0.15.1 # https://github.com/pallets/werkzeug
+Werkzeug==0.15.2 # https://github.com/pallets/werkzeug
 ipdb==0.12 # https://github.com/gotcha/ipdb
-Sphinx==2.0.0 # https://github.com/sphinx-doc/sphinx
+Sphinx==2.0.1 # https://github.com/sphinx-doc/sphinx
 {%- if cookiecutter.use_docker == 'y' %}
-psycopg2==2.7.4 --no-binary psycopg2 # https://github.com/psycopg/psycopg2
+psycopg2==2.8 --no-binary psycopg2 # https://github.com/psycopg/psycopg2
 {%- else %}
-psycopg2-binary==2.7.7 # https://github.com/psycopg/psycopg2
+psycopg2-binary==2.8 # https://github.com/psycopg/psycopg2
 {%- endif %}
 
 # Testing
 # ------------------------------------------------------------------------------
-mypy==0.670 # https://github.com/python/mypy
-pytest==4.4.0 # https://github.com/pytest-dev/pytest
+mypy==0.700 # https://github.com/python/mypy
+pytest==4.4.1 # https://github.com/pytest-dev/pytest
 pytest-sugar==0.9.2 # https://github.com/Frozenball/pytest-sugar
 
 # Code quality

{{cookiecutter.project_slug}}/requirements/production.txt

@@ -3,15 +3,19 @@
 -r ./base.txt
 
 gunicorn==19.9.0 # https://github.com/benoitc/gunicorn
-psycopg2==2.7.4 --no-binary psycopg2 # https://github.com/psycopg/psycopg2
+psycopg2==2.8 --no-binary psycopg2 # https://github.com/psycopg/psycopg2
 {%- if cookiecutter.use_whitenoise == 'n' %}
 Collectfast==0.6.2 # https://github.com/antonagestam/collectfast
 {%- endif %}
 {%- if cookiecutter.use_sentry == "y" %}
-sentry-sdk==0.7.9 # https://github.com/getsentry/sentry-python
+sentry-sdk==0.7.10 # https://github.com/getsentry/sentry-python
 {%- endif %}
 
 # Django
 # ------------------------------------------------------------------------------
+{%- if cookiecutter.cloud_provider == 'AWS' %}
 django-storages[boto3]==1.7.1 # https://github.com/jschneier/django-storages
+{%- elif cookiecutter.cloud_provider == 'GCE' %}
+django-storages[google]==1.7.1 # https://github.com/jschneier/django-storages
+{%- endif %}
 django-anymail[mailgun]==6.0 # https://github.com/anymail/django-anymail