diff --git a/.pyup.yml b/.pyup.yml new file mode 100644 index 000000000..4978524e8 --- /dev/null +++ b/.pyup.yml @@ -0,0 +1,17 @@ +# configure updates globally +# default: all +# allowed: all, insecure, False +update: all + +# configure dependency pinning globally +# default: True +# allowed: True, False +pin: True + +# Specify requirement files by hand, pyup seems to struggle to +# find the ones in the project_slug folder +requirements: + - "requirements.txt" + - "{{cookiecutter.project_slug}}/requirements/base.txt" + - "{{cookiecutter.project_slug}}/requirements/local.txt" + - "{{cookiecutter.project_slug}}/requirements/production.txt" diff --git a/.travis.yml b/.travis.yml index a46726d6c..a74090b28 100644 --- a/.travis.yml +++ b/.travis.yml @@ -7,16 +7,20 @@ language: python python: 3.6 -env: - - TOX_ENV=py36 - before_install: - docker-compose -v - docker -v -script: - - tox -e $TOX_ENV - - sh tests/test_docker.sh +matrix: + include: + - name: Tox Test + script: tox -e py36 + - name: Black template + script: tox -e black + - name: Basic Docker + script: sh tests/test_docker.sh + - name: Docker with Celery + script: sh tests/test_docker.sh use_celery=y install: - pip install tox diff --git a/CONTRIBUTORS.rst b/CONTRIBUTORS.rst index d0b76ea20..b4b96e871 100644 --- a/CONTRIBUTORS.rst +++ b/CONTRIBUTORS.rst @@ -46,7 +46,7 @@ Listed in alphabetical order. Aaron Eikenberry `@aeikenberry`_ Adam Bogdał `@bogdal`_ Adam Dobrawy `@ad-m`_ - Adam Steele `@adammsteele` + Adam Steele `@adammsteele`_ Agam Dua Alberto Sanchez `@alb3rto`_ Alex Tsai `@caffodian`_ @@ -57,19 +57,21 @@ Listed in alphabetical order. Andrew Mikhnevich `@zcho`_ Andy Rose Anna Callahan `@jazztpt`_ + Anna Sidwell `@takkaria`_ Antonia Blair `@antoniablair`_ @antoniablairart Anuj Bansal `@ahhda`_ Arcuri Davide `@dadokkio`_ Areski Belaid `@areski`_ Ashley Camba Barclay Gauld `@yunti`_ - Ben Warren `@bwarren2` + Ben Warren `@bwarren2`_ Ben Lopatin Benjamin Abel Bert de Miranda `@bertdemiranda`_ Bo Lopker `@blopker`_ Bouke Haarsma Brent Payne `@brentpayne`_ @brentpayne + Bartek `@btknu`_ Burhan Khalid            `@burhan`_                   @burhan Carl Johnson `@carlmjohnson`_ @carlmjohnson Catherine Devlin `@catherinedevlin`_ @@ -91,6 +93,7 @@ Listed in alphabetical order. Davit Tovmasyan `@davitovmasyan`_ Davur Clementsen `@dsclementsen`_ @davur Delio Castillo `@jangeador`_ @jangeador + Demetris Stavrou `@demestav`_ Denis Orehovsky `@apirobot`_ Dónal Adams `@epileptic-fish`_ Diane Chen `@purplediane`_ @purplediane88 @@ -103,6 +106,7 @@ Listed in alphabetical order. Garry Cairns `@garry-cairns`_ Garry Polley `@garrypolley`_ Hamish Durkin `@durkode`_ + Hana Quadara `@hanaquadara`_ Harry Percival `@hjwp`_ Hendrik Schneider `@hendrikschneider`_ Henrique G. G. Pereira `@ikkebr`_ @@ -110,6 +114,7 @@ Listed in alphabetical order. Irfan Ahmad `@erfaan`_ @erfaan Jan Van Bruggen `@jvanbrug`_ Jens Nilsson `@phiberjenz`_ + Jelmer Draaijer `@foarsitter`_ Jerome Leclanche `@jleclanche`_ @Adys Jimmy Gitonga `@afrowave`_ @afrowave John Cass `@jcass77`_ @cass_john @@ -143,6 +148,7 @@ Listed in alphabetical order. Mesut Yılmaz `@myilmaz`_ Michael Gecht `@mimischi`_ @_mischi mozillazg `@mozillazg`_ + Oleg Russkin `@rolep`_ Pablo `@oubiga`_ Parbhat Puri `@parbhat`_ Peter Bittner `@bittner`_ @@ -155,7 +161,7 @@ Listed in alphabetical order. 
Roman Afanaskin `@siauPatrick`_ Roman Osipenko `@romanosipenko`_ Russell Davies - Sascha `@saschalalala` @saschalalala + Sascha `@saschalalala`_ @saschalalala Sam Collins `@MightySCollins`_ Shupeyko Nikita `@webyneter`_ Sławek Ehlert `@slafs`_ @@ -180,6 +186,7 @@ Listed in alphabetical order. Denis Bobrov `@delneg`_ Philipp Matthies `@canonnervio`_ Vadim Iskuchekov `@Egregors`_ @egregors + Keith Bailey `@keithjeb`_ ========================== ============================ ============== .. _@a7p: https://github.com/a7p @@ -214,6 +221,7 @@ Listed in alphabetical order. .. _@Collederas: https://github.com/Collederas .. _@davitovmasyan: https://github.com/davitovmasyan .. _@ddiazpinto: https://github.com/ddiazpinto +.. _@demestav: https://github.com/demestav .. _@dezoito: https://github.com/dezoito .. _@dhepper: https://github.com/dhepper .. _@dot2dotseurat: https://github.com/dot2dotseurat @@ -225,6 +233,7 @@ Listed in alphabetical order. .. _@eriol: https://github.com/eriol .. _@eyadsibai: https://github.com/eyadsibai .. _@flyudvik: https://github.com/flyudvik +.. _@foarsitter: https://github.com/foarsitter .. _@garry-cairns: https://github.com/garry-cairns .. _@garrypolley: https://github.com/garrypolley .. _@goldhand: https://github.com/goldhand @@ -272,6 +281,7 @@ Listed in alphabetical order. .. _@ssteinerX: https://github.com/ssteinerx .. _@stepmr: https://github.com/stepmr .. _@suledev: https://github.com/suledev +.. _@takkaria: https://github.com/takkaria .. _@timfreund: https://github.com/timfreund .. _@Travistock: https://github.com/Tavistock .. _@trungdong: https://github.com/trungdong @@ -297,6 +307,17 @@ Listed in alphabetical order. .. _@purplediane: https://github.com/purplediane .. _@umrashrf: https://github.com/umrashrf .. _@ahhda: https://github.com/ahhda +.. _@keithjeb: https://github.com/keithjeb +.. _@btknu: https://github.com/btknu +.. _@rolep: https://github.com/rolep +.. _@canonnervio: https://github.com/canonnervio +.. _@jcass77: https://github.com/jcass77 +.. _@Egregors: https://github.com/Egregors +.. _@saschalalala: https://github.com/saschalalala +.. _@mrcoles: https://github.com/mrcoles +.. _@ericgroom: https://github.com/ericgroom +.. _@hanaquadara: https://github.com/hanaquadara + Special Thanks ~~~~~~~~~~~~~~ diff --git a/README.rst b/README.rst index b9e71ace2..2c037e51f 100644 --- a/README.rst +++ b/README.rst @@ -48,7 +48,7 @@ Features * Optional custom static build using Gulp and livereload * Send emails via Anymail_ (using Mailgun_ by default, but switchable) * Media storage using Amazon S3 -* Docker support using docker-compose_ for development and production (using Caddy_ with LetsEncrypt_ support) +* Docker support using docker-compose_ for development and production (using Traefik_ with LetsEncrypt_ support) * Procfile_ for deploying to Heroku * Instructions for deploying to PythonAnywhere_ * Run tests with unittest or py.test @@ -82,7 +82,7 @@ Optional Integrations .. _Sentry: https://sentry.io/welcome/ .. _docker-compose: https://github.com/docker/compose .. _PythonAnywhere: https://www.pythonanywhere.com/ -.. _Caddy: https://caddyserver.com/ +.. _Traefik: https://traefik.io/ .. _LetsEncrypt: https://letsencrypt.org/ Constraints @@ -279,9 +279,9 @@ experience better. Articles --------- +* `Using cookiecutter-django with Google Cloud Storage`_ - Mar. 12, 2019 * `cookiecutter-django with Nginx, Route 53 and ELB`_ - Feb. 12, 2018 * `cookiecutter-django and Amazon RDS`_ - Feb. 7, 2018 -* `Deploying Cookiecutter-Django with Docker-Compose`_ - Oct. 
19, 2017
* `Using Cookiecutter to Jumpstart a Django Project on Windows with PyCharm`_ - May 19, 2017
* `Exploring with Cookiecutter`_ - Dec. 3, 2016
* `Introduction to Cookiecutter-Django`_ - Feb. 19, 2016
@@ -292,9 +292,9 @@ Articles
 Have a blog or online publication? Write about your cookiecutter-django tips and tricks, then send us a pull request with the link.
 
+.. _`Using cookiecutter-django with Google Cloud Storage`: https://ahhda.github.io/cloud/gce/django/2019/03/12/using-django-cookiecutter-cloud-storage.html
 .. _`cookiecutter-django with Nginx, Route 53 and ELB`: https://msaizar.com/blog/cookiecutter-django-nginx-route-53-and-elb/
 .. _`cookiecutter-django and Amazon RDS`: https://msaizar.com/blog/cookiecutter-django-and-amazon-rds/
-.. _`Deploying Cookiecutter-Django with Docker-Compose`: http://adamantine.me/2017/10/19/deploying-cookiecutter-django-with-docker-compose/
 .. _`Exploring with Cookiecutter`: http://www.snowboardingcoder.com/django/2016/12/03/exploring-with-cookiecutter/
 .. _`Using Cookiecutter to Jumpstart a Django Project on Windows with PyCharm`: https://joshuahunter.com/posts/using-cookiecutter-to-jumpstart-a-django-project-on-windows-with-pycharm/
diff --git a/cookiecutter.json b/cookiecutter.json
index b5dda0c70..eabf7d142 100644
--- a/cookiecutter.json
+++ b/cookiecutter.json
@@ -32,6 +32,10 @@
         "None",
         "Gulp"
     ],
+    "cloud_provider": [
+        "AWS",
+        "GCE"
+    ],
     "custom_bootstrap_compilation": "n",
     "use_compressor": "n",
     "use_celery": "n",
diff --git a/docs/deployment-on-heroku.rst b/docs/deployment-on-heroku.rst
index f753aa5a4..4a16123a0 100644
--- a/docs/deployment-on-heroku.rst
+++ b/docs/deployment-on-heroku.rst
@@ -3,6 +3,9 @@ Deployment on Heroku
 
 .. index:: Heroku
 
+Commands to run
+---------------
+
 Run these commands to deploy the project to Heroku:
 
 .. code-block:: bash
@@ -17,11 +20,8 @@ Run these commands to deploy the project to Heroku:
 
     heroku addons:create heroku-redis:hobby-dev
 
-    # If using mailgun:
     heroku addons:create mailgun:starter
 
-    heroku addons:create sentry:f1
-
     heroku config:set PYTHONHASHSEED=random
     heroku config:set WEB_CONCURRENCY=4
@@ -47,10 +47,75 @@ Run these commands to deploy the project to Heroku:
 
     git push heroku master
-
     heroku run python manage.py migrate
     heroku run python manage.py createsuperuser
-
     heroku run python manage.py collectstatic --no-input
     heroku run python manage.py check --deploy
     heroku open
+
+
+.. warning::
+
+    .. include:: mailgun.rst
+
+
+Optional actions
+----------------
+
+Celery
+++++++
+
+Celery requires a few extra environment variables to be fully operational. The worker is also defined
+in the ``Procfile``, but it is turned off by default:
+
+.. code-block:: bash
+
+    # Set the broker URL to Redis
+    heroku config:set CELERY_BROKER_URL=`heroku config:get REDIS_URL`
+    # Scale dyno to 1 instance
+    heroku ps:scale worker=1
+
+Sentry
+++++++
+
+If you've opted for Sentry error tracking, you can either install it through the `Sentry add-on`_:
+
+.. code-block:: bash
+
+    heroku addons:create sentry:f1
+
+
+Or add the DSN for your account, if you already have one:
+
+.. code-block:: bash
+
+    heroku config:set SENTRY_DSN=https://xxxx@sentry.io/12345
+
+.. _Sentry add-on: https://elements.heroku.com/addons/sentry
+
+
+Gulp & Bootstrap compilation
+++++++++++++++++++++++++++++
+
+If you've opted for a custom bootstrap build, you'll most likely need to set up
+your app to use `multiple buildpacks`_: one for Python & one for Node.js:
+
+.. code-block:: bash
+
+    heroku buildpacks:add --index 1 heroku/nodejs
+
+At the time of writing, this should do the trick: during deployment,
+Heroku should run ``npm install`` and then ``npm build``,
+which runs Gulp in cookiecutter-django.
+
+If things don't work, please refer to the Heroku docs.
+
+.. _multiple buildpacks: https://devcenter.heroku.com/articles/using-multiple-buildpacks-for-an-app
+
+About Heroku & Docker
+---------------------
+
+Although Heroku has some sort of `Docker support`_, it's not supported by cookiecutter-django.
+We invite you to follow the Heroku documentation about it.
+
+.. _Docker support: https://devcenter.heroku.com/articles/build-docker-images-heroku-yml
diff --git a/docs/deployment-with-docker.rst b/docs/deployment-with-docker.rst
index f6e21e828..aad549327 100644
--- a/docs/deployment-with-docker.rst
+++ b/docs/deployment-with-docker.rst
@@ -7,8 +7,8 @@ Deployment with Docker
 Prerequisites
 -------------
 
-* Docker 1.10+.
-* Docker Compose 1.6+
+* Docker 17.05+.
+* Docker Compose 1.17+
 
 
 Understanding the Docker Compose Setup
@@ -19,7 +19,7 @@ Before you begin, check out the ``production.yml`` file in the root of this proj
 * ``django``: your application running behind ``Gunicorn``;
 * ``postgres``: PostgreSQL database with the application's relational data;
 * ``redis``: Redis instance for caching;
-* ``caddy``: Caddy web server with HTTPS on by default.
+* ``traefik``: Traefik reverse proxy with HTTPS on by default.
 
 Provided you have opted for Celery (via setting ``use_celery`` to ``y``) there are three more services:
@@ -43,6 +43,11 @@ You will probably also need to setup the Mail backend, for example by adding a `
 
 .. _Mailgun: https://mailgun.com
 
+.. warning::
+
+    .. include:: mailgun.rst
+
+
 Optional: Use AWS IAM Role for EC2 instance
 -------------------------------------------
 
@@ -63,11 +68,11 @@ It is always better to deploy a site behind HTTPS and will become crucial as the
 
 * Access to the Django admin is set up by default to require HTTPS in production or once *live*.
 
-The Caddy web server used in the default configuration will get you a valid certificate from Lets Encrypt and update it automatically. All you need to do to enable this is to make sure that your DNS records are pointing to the server Caddy runs on.
+The Traefik reverse proxy used in the default configuration will get you a valid certificate from Let's Encrypt and update it automatically. All you need to do to enable this is to make sure that your DNS records are pointing to the server Traefik runs on.
 
-You can read more about this here at `Automatic HTTPS`_ in the Caddy docs.
+You can read more about this feature, and how to configure it, at `Automatic HTTPS`_ in the Traefik docs.
 
-.. _Automatic HTTPS: https://caddyserver.com/docs/automatic-https
+.. _Automatic HTTPS: https://docs.traefik.io/configuration/acme/
 
 
 (Optional) Postgres Data Volume Modifications
@@ -112,7 +117,7 @@ If you want to scale your application, run::
 
     docker-compose -f production.yml scale django=4
     docker-compose -f production.yml scale celeryworker=2
 
-.. warning:: don't try to scale ``postgres``, ``celerybeat``, or ``caddy``.
+.. warning:: don't try to scale ``postgres``, ``celerybeat``, or ``traefik``.
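With the Docker Compose version required above (1.17+), the same scaling can also be expressed through a single ``up`` invocation. A minimal sketch, using the service names from the example above::

    docker-compose -f production.yml up -d --scale django=4 --scale celeryworker=2

Either form should work; newer Compose releases steer users toward the ``up --scale`` variant.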
To see how your containers are doing run::
@@ -144,3 +149,4 @@ Move it to ``/etc/supervisor/conf.d/{{cookiecutter.project_slug}}.conf`` and run
 For status check, run::
 
     supervisorctl status
+
diff --git a/docs/developing-locally-docker.rst b/docs/developing-locally-docker.rst
index 08b25f3bc..da4e67aa7 100644
--- a/docs/developing-locally-docker.rst
+++ b/docs/developing-locally-docker.rst
@@ -17,17 +17,6 @@ Prerequisites
 
 .. _`installation guide`: https://docs.docker.com/compose/install/
 
-Attention, Windows Users
-------------------------
-
-Currently PostgreSQL (``psycopg2`` python package) is not installed inside Docker containers for Windows users, while it is required by the generated Django project. To fix this, add ``psycopg2`` to the list of requirements inside ``requirements/base.txt``::
-
-    # Python-PostgreSQL Database Adapter
-    psycopg2==2.6.2
-
-Doing this will prevent the project from being installed in an Windows-only environment (thus without usage of Docker). If you want to use this project without Docker, make sure to remove ``psycopg2`` from the requirements again.
-
-
 Build the Stack
 ---------------
@@ -105,7 +94,6 @@ The most important thing for us here now is ``env_file`` section enlisting ``./.
 │   ├── .django
 │   └── .postgres
 └── .production
-    ├── .caddy
     ├── .django
     └── .postgres
@@ -120,7 +108,7 @@ Consider the aforementioned ``.envs/.local/.postgres``: ::
 
 POSTGRES_USER=XgOWtQtJecsAbaIyslwGvFvPawftNaqO
 POSTGRES_PASSWORD=jSljDz4whHuwO3aJIgVBrqEml5Ycbghorep4uVJ4xjDYQu0LfuTZdctj7y0YcCLu
 
-The three envs we are presented with here are ``POSTGRES_DB``, ``POSTGRES_USER``, and ``POSTGRES_PASSWORD`` (by the way, their values have also been generated for you). You might have figured out already where these definitions will end up; it's all the same with ``django`` and ``caddy`` service container envs.
+The three envs we are presented with here are ``POSTGRES_DB``, ``POSTGRES_USER``, and ``POSTGRES_PASSWORD`` (by the way, their values have also been generated for you). You might have figured out already where these definitions will end up; it's all the same with ``django`` service container envs.
 
 One final touch: should you ever need to merge ``.envs/production/*`` in a single ``.env`` run the ``merge_production_dotenvs_in_dotenv.py``: ::
@@ -171,6 +159,16 @@ When developing locally you can go with MailHog_ for email testing provided ``us
 
 .. _Mailhog: https://github.com/mailhog/MailHog/
 
+.. _`CeleryTasks`:
+
+Celery tasks in local development
+~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
+When not using Docker, Celery tasks are set to run in eager mode, so that a full stack is not needed. When using Docker, the task
+scheduler will be used by default.
+
+If you need tasks to be executed on the main thread during development, set ``CELERY_TASK_ALWAYS_EAGER = True`` in ``config/settings/local.py``.
+
+Possible uses include testing, or easier profiling with DJDT.
 
 .. _`CeleryFlower`:
diff --git a/docs/developing-locally.rst b/docs/developing-locally.rst
index 09c5db396..3434f68b6 100644
--- a/docs/developing-locally.rst
+++ b/docs/developing-locally.rst
@@ -118,6 +118,16 @@ In production, we have Mailgun_ configured to have your back!
 
 .. _Mailgun: https://www.mailgun.com/
 
+Celery
+------
+If the project is configured to use Celery as a task scheduler, then by default tasks are set to run on the main thread
+when developing locally.
If you have the appropriate setup on your local machine, set
+
+``CELERY_TASK_ALWAYS_EAGER = False``
+
+in ``config/settings/local.py``.
+
+
 Sass Compilation & Live Reloading
 ---------------------------------
diff --git a/docs/docker-postgres-backups.rst b/docs/docker-postgres-backups.rst
index c1a8a5e0c..6ccb7cf1e 100644
--- a/docs/docker-postgres-backups.rst
+++ b/docs/docker-postgres-backups.rst
@@ -85,3 +85,11 @@ You will see something like ::
 # ...
 ALTER TABLE
 SUCCESS: The 'my_project' database has been restored from the '/backups/backup_2018_03_13T09_05_07.sql.gz' backup.
+
+
+Backup to Amazon S3
+----------------------------------
+To upload your backups to Amazon S3, you can use the aws cli container. The upload command uploads the postgres ``/backups`` directory recursively, and the download command downloads a specific backup. The default S3 environment variables are used. ::
+
+    $ docker-compose -f production.yml run --rm awscli upload
+    $ docker-compose -f production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
diff --git a/docs/linters.rst b/docs/linters.rst
index e59ff0df7..2d6232181 100644
--- a/docs/linters.rst
+++ b/docs/linters.rst
@@ -25,7 +25,7 @@ This is included in flake8's checks, but you can also run it separately to see a
 
 The config for pylint is located in .pylintrc. It specifies:
 
-* Use the pylint_common and pylint_django plugins. If using Celery, also use pylint_celery.
+* Use the pylint_django plugin. If using Celery, also use pylint_celery.
 * Set max line length to 120 chars
 * Disable linting messages for missing docstring and invalid name
 * max-parents=13
diff --git a/docs/mailgun.rst b/docs/mailgun.rst
new file mode 100644
index 000000000..1f34e3c86
--- /dev/null
+++ b/docs/mailgun.rst
@@ -0,0 +1,13 @@
+If the email server you use to send email isn't configured properly (Mailgun by default),
+attempting to send an email will cause an Internal Server Error.
+
+By default, django-allauth is set up to `make email verification mandatory`_,
+which means it'll send a verification email when an unverified user tries to
+log in or when someone tries to sign up.
+
+This may happen just after you've set up your Mailgun account, which runs in a
+sandbox subdomain by default. Either add your email to the list of authorized recipients
+or verify your domain.
+
+
+.. _make email verification mandatory: https://django-allauth.readthedocs.io/en/latest/configuration.html?highlight=ACCOUNT_EMAIL_VERIFICATION
diff --git a/docs/settings.rst b/docs/settings.rst
index 26b161a09..212527cfd 100644
--- a/docs/settings.rst
+++ b/docs/settings.rst
@@ -44,8 +44,8 @@ CELERY_BROKER_URL CELERY_BROKER_URL auto w/ Dock
 DJANGO_AWS_ACCESS_KEY_ID       AWS_ACCESS_KEY_ID       n/a           raises error
 DJANGO_AWS_SECRET_ACCESS_KEY   AWS_SECRET_ACCESS_KEY   n/a           raises error
 DJANGO_AWS_STORAGE_BUCKET_NAME AWS_STORAGE_BUCKET_NAME n/a           raises error
+DJANGO_AWS_S3_REGION_NAME      AWS_S3_REGION_NAME      n/a           None
 SENTRY_DSN                     SENTRY_DSN              n/a           raises error
-DJANGO_SENTRY_CLIENT           SENTRY_CLIENT           n/a           raven.contrib.django.raven_compat.DjangoClient
 DJANGO_SENTRY_LOG_LEVEL        SENTRY_LOG_LEVEL        n/a           logging.INFO
 MAILGUN_API_KEY                MAILGUN_ACCESS_KEY      n/a           raises error
 MAILGUN_DOMAIN                 MAILGUN_SENDER_DOMAIN   n/a           raises error
diff --git a/docs/troubleshooting.rst b/docs/troubleshooting.rst
index d0c0ba434..68db2fb08 100644
--- a/docs/troubleshooting.rst
+++ b/docs/troubleshooting.rst
@@ -11,5 +11,7 @@ This page contains some advice about errors and problems commonly encountered du
#. 
New apps not getting created in project root: This is the expected behavior, because cookiecutter-django does not change the way that django startapp works, you'll have to fix this manually (see `#1725`_) +#. .. include:: mailgun.rst + .. _#528: https://github.com/pydanny/cookiecutter-django/issues/528#issuecomment-212650373 .. _#1725: https://github.com/pydanny/cookiecutter-django/issues/1725#issuecomment-407493176 diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index 45435dd02..ab05b375a 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -32,10 +32,7 @@ DEBUG_VALUE = "debug" def remove_open_source_files(): - file_names = [ - "CONTRIBUTORS.txt", - "LICENSE", - ] + file_names = ["CONTRIBUTORS.txt", "LICENSE"] for file_name in file_names: os.remove(file_name) @@ -71,7 +68,10 @@ def remove_utility_files(): def remove_heroku_files(): file_names = ["Procfile", "runtime.txt", "requirements.txt"] for file_name in file_names: - if file_name == "requirements.txt" and "{{ cookiecutter.use_travisci }}".lower() == "y": + if ( + file_name == "requirements.txt" + and "{{ cookiecutter.use_travisci }}".lower() == "y" + ): # don't remove the file if we are using travisci but not using heroku continue os.remove(file_name) @@ -183,11 +183,7 @@ def generate_postgres_user(debug=False): def set_postgres_user(file_path, value): - postgres_user = set_flag( - file_path, - "!!!SET POSTGRES_USER!!!", - value=value, - ) + postgres_user = set_flag(file_path, "!!!SET POSTGRES_USER!!!", value=value) return postgres_user @@ -205,9 +201,7 @@ def set_postgres_password(file_path, value=None): def set_celery_flower_user(file_path, value): celery_flower_user = set_flag( - file_path, - "!!!SET CELERY_FLOWER_USER!!!", - value=value, + file_path, "!!!SET CELERY_FLOWER_USER!!!", value=value ) return celery_flower_user @@ -230,11 +224,7 @@ def append_to_gitignore_file(s): gitignore_file.write(os.linesep) -def set_flags_in_envs( - postgres_user, - celery_flower_user, - debug=False, -): +def set_flags_in_envs(postgres_user, celery_flower_user, debug=False): local_django_envs_path = os.path.join(".envs", ".local", ".django") production_django_envs_path = os.path.join(".envs", ".production", ".django") local_postgres_envs_path = os.path.join(".envs", ".local", ".postgres") @@ -244,14 +234,22 @@ def set_flags_in_envs( set_django_admin_url(production_django_envs_path) set_postgres_user(local_postgres_envs_path, value=postgres_user) - set_postgres_password(local_postgres_envs_path, value=DEBUG_VALUE if debug else None) + set_postgres_password( + local_postgres_envs_path, value=DEBUG_VALUE if debug else None + ) set_postgres_user(production_postgres_envs_path, value=postgres_user) - set_postgres_password(production_postgres_envs_path, value=DEBUG_VALUE if debug else None) + set_postgres_password( + production_postgres_envs_path, value=DEBUG_VALUE if debug else None + ) set_celery_flower_user(local_django_envs_path, value=celery_flower_user) - set_celery_flower_password(local_django_envs_path, value=DEBUG_VALUE if debug else None) + set_celery_flower_password( + local_django_envs_path, value=DEBUG_VALUE if debug else None + ) set_celery_flower_user(production_django_envs_path, value=celery_flower_user) - set_celery_flower_password(production_django_envs_path, value=DEBUG_VALUE if debug else None) + set_celery_flower_password( + production_django_envs_path, value=DEBUG_VALUE if debug else None + ) def set_flags_in_settings_files(): @@ -269,6 +267,10 @@ def remove_celery_compose_dirs(): 
shutil.rmtree(os.path.join("compose", "production", "django", "celery")) +def remove_node_dockerfile(): + shutil.rmtree(os.path.join("compose", "local", "node")) + + def main(): debug = "{{ cookiecutter.debug }}".lower() == "y" @@ -302,8 +304,8 @@ def main(): if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y": print( INFO + ".env(s) are only utilized when Docker Compose and/or " - "Heroku support is enabled so keeping them does not " - "make sense given your current setup." + TERMINATOR + "Heroku support is enabled so keeping them does not " + "make sense given your current setup." + TERMINATOR ) remove_envs_and_associated_files() else: @@ -315,21 +317,8 @@ def main(): if "{{ cookiecutter.js_task_runner}}".lower() == "none": remove_gulp_files() remove_packagejson_file() - if ( - "{{ cookiecutter.js_task_runner }}".lower() != "none" - and "{{ cookiecutter.use_docker }}".lower() == "y" - ): - print( - WARNING - + "Docker and {} JS task runner ".format( - "{{ cookiecutter.js_task_runner }}".lower().capitalize() - ) - + "working together not supported yet. " - "You can continue using the generated project like you " - "normally would, however you would need to add a JS " - "task runner service to your Docker Compose configuration " - "manually." + TERMINATOR - ) + if "{{ cookiecutter.use_docker }}".lower() == "y": + remove_node_dockerfile() if "{{ cookiecutter.use_celery }}".lower() == "n": remove_celery_app() diff --git a/hooks/pre_gen_project.py b/hooks/pre_gen_project.py index b7f4dfbbb..e7bc1e39a 100644 --- a/hooks/pre_gen_project.py +++ b/hooks/pre_gen_project.py @@ -18,19 +18,20 @@ SUCCESS = "\x1b[1;32m [SUCCESS]: " project_slug = "{{ cookiecutter.project_slug }}" if hasattr(project_slug, "isidentifier"): - assert project_slug.isidentifier(), "'{}' project slug is not a valid Python identifier.".format( - project_slug - ) + assert ( + project_slug.isidentifier() + ), "'{}' project slug is not a valid Python identifier.".format(project_slug) -assert "\\" not in "{{ cookiecutter.author_name }}", "Don't include backslashes in author name." +assert ( + "\\" not in "{{ cookiecutter.author_name }}" +), "Don't include backslashes in author name." if "{{ cookiecutter.use_docker }}".lower() == "n": python_major_version = sys.version_info[0] if python_major_version == 2: print( - WARNING + "Cookiecutter Django does not support Python 2. " - "Stability is guaranteed with Python 3.6+ only, " - "are you sure you want to proceed (y/n)? " + TERMINATOR + WARNING + "You're running cookiecutter under Python 2, but the generated " + "project requires Python 3.6+. Do you want to proceed (y/n)? 
" + TERMINATOR ) yes_options, no_options = frozenset(["y"]), frozenset(["n"]) while True: diff --git a/requirements.txt b/requirements.txt index 90870656e..25b0135b4 100644 --- a/requirements.txt +++ b/requirements.txt @@ -4,10 +4,13 @@ binaryornot==0.4.4 # Code quality # ------------------------------------------------------------------------------ -flake8==3.6.0 +black==19.3b0 +flake8==3.7.6 # Testing # ------------------------------------------------------------------------------ -tox==3.6.1 -pytest==4.1.1 +tox==3.8.6 +pytest==4.4.0 +pytest_cases==1.6.2 pytest-cookies==0.3.0 +pyyaml==5.1 diff --git a/tests/test_cookiecutter_generation.py b/tests/test_cookiecutter_generation.py index cf173576b..17375b1cf 100755 --- a/tests/test_cookiecutter_generation.py +++ b/tests/test_cookiecutter_generation.py @@ -1,13 +1,18 @@ import os import re -import sh import pytest +from pytest_cases import pytest_fixture_plus +import sh +import yaml from binaryornot.check import is_binary PATTERN = "{{(\s?cookiecutter)[.](.*?)}}" RE_OBJ = re.compile(PATTERN) +YN_CHOICES = ["y", "n"] +CLOUD_CHOICES = ["AWS", "GCE"] + @pytest.fixture def context(): @@ -23,6 +28,38 @@ def context(): } +@pytest_fixture_plus +@pytest.mark.parametrize("windows", YN_CHOICES, ids=lambda yn: f"win:{yn}") +@pytest.mark.parametrize("use_docker", YN_CHOICES, ids=lambda yn: f"docker:{yn}") +@pytest.mark.parametrize("use_celery", YN_CHOICES, ids=lambda yn: f"celery:{yn}") +@pytest.mark.parametrize("use_mailhog", YN_CHOICES, ids=lambda yn: f"mailhog:{yn}") +@pytest.mark.parametrize("use_sentry", YN_CHOICES, ids=lambda yn: f"sentry:{yn}") +@pytest.mark.parametrize("use_compressor", YN_CHOICES, ids=lambda yn: f"cmpr:{yn}") +@pytest.mark.parametrize("use_whitenoise", YN_CHOICES, ids=lambda yn: f"wnoise:{yn}") +@pytest.mark.parametrize("cloud_provider", CLOUD_CHOICES, ids=lambda yn: f"cloud:{yn}") +def context_combination( + windows, + use_docker, + use_celery, + use_mailhog, + use_sentry, + use_compressor, + use_whitenoise, + cloud_provider, +): + """Fixture that parametrize the function where it's used.""" + return { + "windows": windows, + "use_docker": use_docker, + "use_compressor": use_compressor, + "use_celery": use_celery, + "use_mailhog": use_mailhog, + "use_sentry": use_sentry, + "use_whitenoise": use_whitenoise, + "cloud_provider": cloud_provider, + } + + def build_files_list(root_dir): """Build a list containing absolute paths to the generated files.""" return [ @@ -47,8 +84,13 @@ def check_paths(paths): assert match is None, msg.format(path) -def test_default_configuration(cookies, context): - result = cookies.bake(extra_context=context) +def test_project_generation(cookies, context, context_combination): + """ + Test that project is generated and fully rendered. + + This is parametrized for each combination from ``context_combination`` fixture + """ + result = cookies.bake(extra_context={**context, **context_combination}) assert result.exit_code == 0 assert result.exception is None assert result.project.basename == context["project_slug"] @@ -59,29 +101,36 @@ def test_default_configuration(cookies, context): check_paths(paths) -@pytest.fixture(params=["use_mailhog", "use_celery", "windows"]) -def feature_context(request, context): - context.update({request.param: "y"}) - return context +def test_linting_passes(cookies, context_combination): + """ + Generated project should pass flake8 & black. 
- -def test_enabled_features(cookies, feature_context): - result = cookies.bake(extra_context=feature_context) - assert result.exit_code == 0 - assert result.exception is None - assert result.project.basename == feature_context["project_slug"] - assert result.project.isdir() - - paths = build_files_list(str(result.project)) - assert paths - check_paths(paths) - - -def test_flake8_compliance(cookies): - """generated project should pass flake8""" - result = cookies.bake() + This is parametrized for each combination from ``context_combination`` fixture + """ + result = cookies.bake(extra_context=context_combination) try: sh.flake8(str(result.project)) except sh.ErrorReturnCode as e: pytest.fail(e) + + try: + sh.black("--check", "--diff", "--exclude", "migrations", f"{result.project}/") + except sh.ErrorReturnCode as e: + pytest.fail(e) + + +def test_travis_invokes_pytest(cookies, context): + context.update({"use_travisci": "y"}) + result = cookies.bake(extra_context=context) + + assert result.exit_code == 0 + assert result.exception is None + assert result.project.basename == context["project_slug"] + assert result.project.isdir() + + with open(f"{result.project}/.travis.yml", "r") as travis_yml: + try: + assert yaml.load(travis_yml)["script"] == ["pytest"] + except yaml.YAMLError as e: + pytest.fail(e) diff --git a/tests/test_docker.sh b/tests/test_docker.sh index eddfe98c6..55771c14c 100755 --- a/tests/test_docker.sh +++ b/tests/test_docker.sh @@ -3,6 +3,8 @@ # it is meant to be run from the root directory of the repository, eg: # sh tests/test_docker.sh +set -o errexit + # install test requirements pip install -r requirements.txt @@ -11,12 +13,15 @@ mkdir -p .cache/docker cd .cache/docker # create the project using the default settings in cookiecutter.json -cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y +cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y $@ cd my_awesome_project # run the project's type checks docker-compose -f local.yml run django mypy my_awesome_project +# Run black with --check option +docker-compose -f local.yml run django black --check --diff --exclude 'migrations' ./ + # run the project's tests docker-compose -f local.yml run django pytest diff --git a/tox.ini b/tox.ini index 040c8a41c..cef3efc78 100644 --- a/tox.ini +++ b/tox.ini @@ -1,7 +1,11 @@ [tox] skipsdist = true -envlist = py36 +envlist = py36,black [testenv] deps = -rrequirements.txt commands = pytest {posargs:./tests} + +[testenv:black] +deps = black +commands = black --check hooks tests setup.py docs diff --git a/{{cookiecutter.project_slug}}/.envs/.local/.django b/{{cookiecutter.project_slug}}/.envs/.local/.django index 2ed5fbf3d..919f31185 100644 --- a/{{cookiecutter.project_slug}}/.envs/.local/.django +++ b/{{cookiecutter.project_slug}}/.envs/.local/.django @@ -3,10 +3,11 @@ USE_DOCKER=yes IPYTHONDIR=/app/.ipython +{%- if cookiecutter.use_celery == 'y' %} # Redis # ------------------------------------------------------------------------------ REDIS_URL=redis://redis:6379/0 -{% if cookiecutter.use_celery == 'y' %} + # Celery # ------------------------------------------------------------------------------ diff --git a/{{cookiecutter.project_slug}}/.envs/.production/.caddy b/{{cookiecutter.project_slug}}/.envs/.production/.caddy deleted file mode 100644 index 83d7fc7af..000000000 --- a/{{cookiecutter.project_slug}}/.envs/.production/.caddy +++ /dev/null @@ -1,3 +0,0 @@ -# Caddy -# ------------------------------------------------------------------------------ 
-DOMAIN_NAME={{ cookiecutter.domain_name }} diff --git a/{{cookiecutter.project_slug}}/.envs/.production/.django b/{{cookiecutter.project_slug}}/.envs/.production/.django index 4175f8944..a938ada68 100644 --- a/{{cookiecutter.project_slug}}/.envs/.production/.django +++ b/{{cookiecutter.project_slug}}/.envs/.production/.django @@ -16,13 +16,18 @@ DJANGO_SECURE_SSL_REDIRECT=False MAILGUN_API_KEY= DJANGO_SERVER_EMAIL= MAILGUN_DOMAIN= - +{% if cookiecutter.cloud_provider == 'AWS' %} # AWS # ------------------------------------------------------------------------------ DJANGO_AWS_ACCESS_KEY_ID= DJANGO_AWS_SECRET_ACCESS_KEY= DJANGO_AWS_STORAGE_BUCKET_NAME= - +{% elif cookiecutter.cloud_provider == 'GCE' %} +# GCE +# ------------------------------------------------------------------------------ +GOOGLE_APPLICATION_CREDENTIALS= +DJANGO_GCE_STORAGE_BUCKET_NAME= +{% endif %} # django-allauth # ------------------------------------------------------------------------------ DJANGO_ACCOUNT_ALLOW_REGISTRATION=True diff --git a/{{cookiecutter.project_slug}}/.gitignore b/{{cookiecutter.project_slug}}/.gitignore index 1874e9d92..cb8ad8358 100644 --- a/{{cookiecutter.project_slug}}/.gitignore +++ b/{{cookiecutter.project_slug}}/.gitignore @@ -325,7 +325,6 @@ tags ### VirtualEnv template # Virtualenv -# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ [Bb]in [Ii]nclude [Ll]ib diff --git a/{{cookiecutter.project_slug}}/.pylintrc b/{{cookiecutter.project_slug}}/.pylintrc index 1a1992862..a0955f076 100644 --- a/{{cookiecutter.project_slug}}/.pylintrc +++ b/{{cookiecutter.project_slug}}/.pylintrc @@ -1,5 +1,5 @@ [MASTER] -load-plugins=pylint_common, pylint_django{% if cookiecutter.use_celery == "y" %}, pylint_celery {% endif %} +load-plugins=pylint_django{% if cookiecutter.use_celery == "y" %}, pylint_celery {% endif %} [FORMAT] max-line-length=120 diff --git a/{{cookiecutter.project_slug}}/.travis.yml b/{{cookiecutter.project_slug}}/.travis.yml index 5ca54d000..3072f75f3 100644 --- a/{{cookiecutter.project_slug}}/.travis.yml +++ b/{{cookiecutter.project_slug}}/.travis.yml @@ -9,3 +9,7 @@ before_install: language: python python: - "3.6" +install: + - pip install -r requirements/local.txt +script: + - "pytest" diff --git a/{{cookiecutter.project_slug}}/Procfile b/{{cookiecutter.project_slug}}/Procfile index c77d76d96..d9319f5cc 100644 --- a/{{cookiecutter.project_slug}}/Procfile +++ b/{{cookiecutter.project_slug}}/Procfile @@ -1,3 +1,4 @@ +release: python manage.py migrate web: gunicorn config.wsgi:application {% if cookiecutter.use_celery == "y" -%} worker: celery worker --app={{cookiecutter.project_slug}}.taskapp --loglevel=info diff --git a/{{cookiecutter.project_slug}}/README.rst b/{{cookiecutter.project_slug}}/README.rst index 49df70198..6ef3b63c4 100644 --- a/{{cookiecutter.project_slug}}/README.rst +++ b/{{cookiecutter.project_slug}}/README.rst @@ -6,6 +6,9 @@ .. image:: https://img.shields.io/badge/built%20with-Cookiecutter%20Django-ff69b4.svg :target: https://github.com/pydanny/cookiecutter-django/ :alt: Built with Cookiecutter Django +.. 
image:: https://img.shields.io/badge/code%20style-black-000000.svg + :target: https://github.com/ambv/black + :alt: Black code style {% if cookiecutter.open_source_license != "Not open source" %} :License: {{cookiecutter.open_source_license}} diff --git a/{{cookiecutter.project_slug}}/compose/local/node/Dockerfile b/{{cookiecutter.project_slug}}/compose/local/node/Dockerfile new file mode 100644 index 000000000..f9976e206 --- /dev/null +++ b/{{cookiecutter.project_slug}}/compose/local/node/Dockerfile @@ -0,0 +1,9 @@ +FROM node:10-stretch-slim + +WORKDIR /app + +COPY ./package.json /app + +RUN npm install && npm cache clean --force + +ENV PATH ./node_modules/.bin/:$PATH diff --git a/{{cookiecutter.project_slug}}/compose/production/aws/Dockerfile b/{{cookiecutter.project_slug}}/compose/production/aws/Dockerfile new file mode 100644 index 000000000..8282047b3 --- /dev/null +++ b/{{cookiecutter.project_slug}}/compose/production/aws/Dockerfile @@ -0,0 +1,9 @@ +FROM garland/aws-cli-docker:1.15.47 + +COPY ./compose/production/aws/maintenance /usr/local/bin/maintenance +COPY ./compose/production/postgres/maintenance/_sourced /usr/local/bin/maintenance/_sourced + +RUN chmod +x /usr/local/bin/maintenance/* + +RUN mv /usr/local/bin/maintenance/* /usr/local/bin \ + && rmdir /usr/local/bin/maintenance diff --git a/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/download b/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/download new file mode 100644 index 000000000..8d5ea091f --- /dev/null +++ b/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/download @@ -0,0 +1,24 @@ +#!/bin/sh + +### Download a file from your Amazon S3 bucket to the postgres /backups folder +### +### Usage: +### $ docker-compose -f production.yml run --rm awscli <1> + +set -o errexit +set -o pipefail +set -o nounset + +working_dir="$(dirname ${0})" +source "${working_dir}/_sourced/constants.sh" +source "${working_dir}/_sourced/messages.sh" + +export AWS_ACCESS_KEY_ID="${DJANGO_AWS_ACCESS_KEY_ID}" +export AWS_SECRET_ACCESS_KEY="${DJANGO_AWS_SECRET_ACCESS_KEY}" +export AWS_STORAGE_BUCKET_NAME="${DJANGO_AWS_STORAGE_BUCKET_NAME}" + + +aws s3 cp s3://${AWS_STORAGE_BUCKET_NAME}${BACKUP_DIR_PATH}/${1} ${BACKUP_DIR_PATH}/${1} + +message_success "Finished downloading ${1}." + diff --git a/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/upload b/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/upload new file mode 100644 index 000000000..4a89dcb5a --- /dev/null +++ b/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/upload @@ -0,0 +1,30 @@ +#!/bin/sh + +### Upload the /backups folder to Amazon S3 +### +### Usage: +### $ docker-compose -f production.yml run --rm awscli upload + +set -o errexit +set -o pipefail +set -o nounset + +working_dir="$(dirname ${0})" +source "${working_dir}/_sourced/constants.sh" +source "${working_dir}/_sourced/messages.sh" + +export AWS_ACCESS_KEY_ID="${DJANGO_AWS_ACCESS_KEY_ID}" +export AWS_SECRET_ACCESS_KEY="${DJANGO_AWS_SECRET_ACCESS_KEY}" +export AWS_STORAGE_BUCKET_NAME="${DJANGO_AWS_STORAGE_BUCKET_NAME}" + + +message_info "Upload the backups directory to S3 bucket {$AWS_STORAGE_BUCKET_NAME}" + +aws s3 cp ${BACKUP_DIR_PATH} s3://${AWS_STORAGE_BUCKET_NAME}${BACKUP_DIR_PATH} --recursive + +message_info "Cleaning the directory ${BACKUP_DIR_PATH}" + +rm -rf ${BACKUP_DIR_PATH}/* + +message_success "Finished uploading and cleaning." 
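+
+### A possible end-to-end restore workflow (a sketch, not part of this script: the backup
+### filename below is only an example taken from the backups documentation, and the
+### ``restore`` command refers to the postgres maintenance script described in
+### docs/docker-postgres-backups.rst):
+###
+###    $ docker-compose -f production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
+###    $ docker-compose -f production.yml exec postgres restore backup_2018_03_13T09_05_07.sql.gz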
+ diff --git a/{{cookiecutter.project_slug}}/compose/production/caddy/Caddyfile b/{{cookiecutter.project_slug}}/compose/production/caddy/Caddyfile deleted file mode 100644 index 323e43922..000000000 --- a/{{cookiecutter.project_slug}}/compose/production/caddy/Caddyfile +++ /dev/null @@ -1,15 +0,0 @@ -www.{% raw %}{$DOMAIN_NAME}{% endraw %} { - redir https://{% raw %}{$DOMAIN_NAME}{% endraw %} -} - -{% raw %}{$DOMAIN_NAME}{% endraw %} { - proxy / django:5000 { - header_upstream Host {host} - header_upstream X-Real-IP {remote} - header_upstream X-Forwarded-Proto {scheme} - header_upstream X-CSRFToken {~csrftoken} - } - log stdout - errors stdout - gzip -} diff --git a/{{cookiecutter.project_slug}}/compose/production/caddy/Dockerfile b/{{cookiecutter.project_slug}}/compose/production/caddy/Dockerfile deleted file mode 100644 index c32efb3ee..000000000 --- a/{{cookiecutter.project_slug}}/compose/production/caddy/Dockerfile +++ /dev/null @@ -1,3 +0,0 @@ -FROM abiosoft/caddy:0.11.0 - -COPY ./compose/production/caddy/Caddyfile /etc/Caddyfile diff --git a/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile b/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile index 68d72327f..80b5bc2cf 100644 --- a/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile +++ b/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile @@ -1,3 +1,14 @@ +{% if cookiecutter.js_task_runner == 'Gulp' -%} +FROM node:10-stretch-slim as client-builder + +WORKDIR /app +COPY ./package.json /app +RUN npm install && npm cache clean --force +COPY . /app +RUN npm run build + +# Python build stage +{%- endif %} FROM python:3.6-alpine ENV PYTHONUNBUFFERED 1 @@ -28,7 +39,8 @@ COPY ./compose/production/django/start /start RUN sed -i 's/\r//' /start RUN chmod +x /start RUN chown django /start -{% if cookiecutter.use_celery == "y" %} + +{%- if cookiecutter.use_celery == "y" %} COPY ./compose/production/django/celery/worker/start /start-celeryworker RUN sed -i 's/\r//' /start-celeryworker RUN chmod +x /start-celeryworker @@ -42,8 +54,13 @@ RUN chown django /start-celerybeat COPY ./compose/production/django/celery/flower/start /start-flower RUN sed -i 's/\r//' /start-flower RUN chmod +x /start-flower -{% endif %} +{%- endif %} + +{%- if cookiecutter.js_task_runner == 'Gulp' %} +COPY --from=client-builder /app /app +{% else %} COPY . /app +{%- endif %} RUN chown -R django /app diff --git a/{{cookiecutter.project_slug}}/compose/production/django/entrypoint b/{{cookiecutter.project_slug}}/compose/production/django/entrypoint index 4845e343b..0a76b3107 100644 --- a/{{cookiecutter.project_slug}}/compose/production/django/entrypoint +++ b/{{cookiecutter.project_slug}}/compose/production/django/entrypoint @@ -5,8 +5,10 @@ set -o pipefail set -o nounset +{% if cookiecutter.use_celery == 'y' %} # N.B. If only .env files supported variable expansion... 
export CELERY_BROKER_URL="${REDIS_URL}" +{% endif %} if [ -z "${POSTGRES_USER}" ]; then base_postgres_image_default_user='postgres' diff --git a/{{cookiecutter.project_slug}}/compose/production/traefik/Dockerfile b/{{cookiecutter.project_slug}}/compose/production/traefik/Dockerfile new file mode 100644 index 000000000..7088e6fe3 --- /dev/null +++ b/{{cookiecutter.project_slug}}/compose/production/traefik/Dockerfile @@ -0,0 +1,5 @@ +FROM traefik:alpine +RUN mkdir -p /etc/traefik/acme +RUN touch /etc/traefik/acme/acme.json +RUN chmod 600 /etc/traefik/acme/acme.json +COPY ./compose/production/traefik/traefik.toml /etc/traefik diff --git a/{{cookiecutter.project_slug}}/compose/production/traefik/traefik.toml b/{{cookiecutter.project_slug}}/compose/production/traefik/traefik.toml new file mode 100644 index 000000000..ad1f20e9e --- /dev/null +++ b/{{cookiecutter.project_slug}}/compose/production/traefik/traefik.toml @@ -0,0 +1,41 @@ +logLevel = "INFO" +defaultEntryPoints = ["http", "https"] + +# Entrypoints, http and https +[entryPoints] + # http should be redirected to https + [entryPoints.http] + address = ":80" + [entryPoints.http.redirect] + entryPoint = "https" + # https is the default + [entryPoints.https] + address = ":443" + [entryPoints.https.tls] + +# Enable ACME (Let's Encrypt): automatic SSL +[acme] +# Email address used for registration +email = "{{ cookiecutter.email }}" +storageFile = "/etc/traefik/acme/acme.json" +entryPoint = "https" +onDemand = false +OnHostRule = true + # Use a HTTP-01 acme challenge rather than TLS-SNI-01 challenge + [acme.httpChallenge] + entryPoint = "http" + +[file] +[backends] + [backends.django] + [backends.django.servers.server1] + url = "http://django:5000" + +[frontends] + [frontends.django] + backend = "django" + passHostHeader = true + [frontends.django.headers] + HostsProxyHeaders = ['X-CSRFToken'] + [frontends.django.routes.dr1] + rule = "Host:{{ cookiecutter.domain_name }}" diff --git a/{{cookiecutter.project_slug}}/config/settings/base.py b/{{cookiecutter.project_slug}}/config/settings/base.py index d22d09c0b..4213d99ec 100644 --- a/{{cookiecutter.project_slug}}/config/settings/base.py +++ b/{{cookiecutter.project_slug}}/config/settings/base.py @@ -4,27 +4,29 @@ Base settings to build other settings files upon. import environ -ROOT_DIR = environ.Path(__file__) - 3 # ({{ cookiecutter.project_slug }}/config/settings/base.py - 3 = {{ cookiecutter.project_slug }}/) -APPS_DIR = ROOT_DIR.path('{{ cookiecutter.project_slug }}') +ROOT_DIR = ( + environ.Path(__file__) - 3 +) # ({{ cookiecutter.project_slug }}/config/settings/base.py - 3 = {{ cookiecutter.project_slug }}/) +APPS_DIR = ROOT_DIR.path("{{ cookiecutter.project_slug }}") env = environ.Env() -READ_DOT_ENV_FILE = env.bool('DJANGO_READ_DOT_ENV_FILE', default=False) +READ_DOT_ENV_FILE = env.bool("DJANGO_READ_DOT_ENV_FILE", default=False) if READ_DOT_ENV_FILE: # OS environment variables take precedence over variables from .env - env.read_env(str(ROOT_DIR.path('.env'))) + env.read_env(str(ROOT_DIR.path(".env"))) # GENERAL # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#debug -DEBUG = env.bool('DJANGO_DEBUG', False) +DEBUG = env.bool("DJANGO_DEBUG", False) # Local time zone. Choices are # http://en.wikipedia.org/wiki/List_of_tz_zones_by_name # though not all of them may be available with every OS. # In Windows, this must be set to your system time zone. 
-TIME_ZONE = '{{ cookiecutter.timezone }}' +TIME_ZONE = "{{ cookiecutter.timezone }}" # https://docs.djangoproject.com/en/dev/ref/settings/#language-code -LANGUAGE_CODE = 'en-us' +LANGUAGE_CODE = "en-us" # https://docs.djangoproject.com/en/dev/ref/settings/#site-id SITE_ID = 1 # https://docs.djangoproject.com/en/dev/ref/settings/#use-i18n @@ -37,45 +39,43 @@ USE_TZ = True # DATABASES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#databases -{% if cookiecutter.use_docker == 'y' -%} -DATABASES = { - 'default': env.db('DATABASE_URL'), -} +{% if cookiecutter.use_docker == "y" -%} +DATABASES = {"default": env.db("DATABASE_URL")} {%- else %} DATABASES = { - 'default': env.db('DATABASE_URL', default='postgres://{% if cookiecutter.windows == 'y' %}localhost{% endif %}/{{cookiecutter.project_slug}}'), + "default": env.db("DATABASE_URL", default="postgres://{% if cookiecutter.windows == 'y' %}localhost{% endif %}/{{cookiecutter.project_slug}}") } {%- endif %} -DATABASES['default']['ATOMIC_REQUESTS'] = True +DATABASES["default"]["ATOMIC_REQUESTS"] = True # URLS # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#root-urlconf -ROOT_URLCONF = 'config.urls' +ROOT_URLCONF = "config.urls" # https://docs.djangoproject.com/en/dev/ref/settings/#wsgi-application -WSGI_APPLICATION = 'config.wsgi.application' +WSGI_APPLICATION = "config.wsgi.application" # APPS # ------------------------------------------------------------------------------ DJANGO_APPS = [ - 'django.contrib.auth', - 'django.contrib.contenttypes', - 'django.contrib.sessions', - 'django.contrib.sites', - 'django.contrib.messages', - 'django.contrib.staticfiles', - # 'django.contrib.humanize', # Handy template tags - 'django.contrib.admin', + "django.contrib.auth", + "django.contrib.contenttypes", + "django.contrib.sessions", + "django.contrib.sites", + "django.contrib.messages", + "django.contrib.staticfiles", + # "django.contrib.humanize", # Handy template tags + "django.contrib.admin", ] THIRD_PARTY_APPS = [ - 'crispy_forms', - 'allauth', - 'allauth.account', - 'allauth.socialaccount', - 'rest_framework', + "crispy_forms", + "allauth", + "allauth.account", + "allauth.socialaccount", + "rest_framework", ] LOCAL_APPS = [ - '{{ cookiecutter.project_slug }}.users.apps.UsersAppConfig', + "{{ cookiecutter.project_slug }}.users.apps.UsersAppConfig", # Your stuff: custom apps go here ] # https://docs.djangoproject.com/en/dev/ref/settings/#installed-apps @@ -84,86 +84,76 @@ INSTALLED_APPS = DJANGO_APPS + THIRD_PARTY_APPS + LOCAL_APPS # MIGRATIONS # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#migration-modules -MIGRATION_MODULES = { - 'sites': '{{ cookiecutter.project_slug }}.contrib.sites.migrations' -} +MIGRATION_MODULES = {"sites": "{{ cookiecutter.project_slug }}.contrib.sites.migrations"} # AUTHENTICATION # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#authentication-backends AUTHENTICATION_BACKENDS = [ - 'django.contrib.auth.backends.ModelBackend', - 'allauth.account.auth_backends.AuthenticationBackend', + "django.contrib.auth.backends.ModelBackend", + "allauth.account.auth_backends.AuthenticationBackend", ] # https://docs.djangoproject.com/en/dev/ref/settings/#auth-user-model -AUTH_USER_MODEL = 'users.User' 
+AUTH_USER_MODEL = "users.User" # https://docs.djangoproject.com/en/dev/ref/settings/#login-redirect-url -LOGIN_REDIRECT_URL = 'users:redirect' +LOGIN_REDIRECT_URL = "users:redirect" # https://docs.djangoproject.com/en/dev/ref/settings/#login-url -LOGIN_URL = 'account_login' +LOGIN_URL = "account_login" # PASSWORDS # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#password-hashers PASSWORD_HASHERS = [ # https://docs.djangoproject.com/en/dev/topics/auth/passwords/#using-argon2-with-django - 'django.contrib.auth.hashers.Argon2PasswordHasher', - 'django.contrib.auth.hashers.PBKDF2PasswordHasher', - 'django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher', - 'django.contrib.auth.hashers.BCryptSHA256PasswordHasher', - 'django.contrib.auth.hashers.BCryptPasswordHasher', + "django.contrib.auth.hashers.Argon2PasswordHasher", + "django.contrib.auth.hashers.PBKDF2PasswordHasher", + "django.contrib.auth.hashers.PBKDF2SHA1PasswordHasher", + "django.contrib.auth.hashers.BCryptSHA256PasswordHasher", + "django.contrib.auth.hashers.BCryptPasswordHasher", ] # https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators AUTH_PASSWORD_VALIDATORS = [ { - 'NAME': 'django.contrib.auth.password_validation.UserAttributeSimilarityValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.MinimumLengthValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.CommonPasswordValidator', - }, - { - 'NAME': 'django.contrib.auth.password_validation.NumericPasswordValidator', + "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator" }, + {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"}, + {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"}, + {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"}, ] # MIDDLEWARE # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#middleware MIDDLEWARE = [ - 'django.middleware.security.SecurityMiddleware', - 'django.contrib.sessions.middleware.SessionMiddleware', - 'django.middleware.common.CommonMiddleware', - 'django.middleware.csrf.CsrfViewMiddleware', - 'django.contrib.auth.middleware.AuthenticationMiddleware', - 'django.contrib.messages.middleware.MessageMiddleware', - 'django.middleware.clickjacking.XFrameOptionsMiddleware', + "django.middleware.security.SecurityMiddleware", + "django.contrib.sessions.middleware.SessionMiddleware", + "django.middleware.common.CommonMiddleware", + "django.middleware.csrf.CsrfViewMiddleware", + "django.contrib.auth.middleware.AuthenticationMiddleware", + "django.contrib.messages.middleware.MessageMiddleware", + "django.middleware.clickjacking.XFrameOptionsMiddleware", ] # STATIC # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#static-root -STATIC_ROOT = str(ROOT_DIR('staticfiles')) +STATIC_ROOT = str(ROOT_DIR("staticfiles")) # https://docs.djangoproject.com/en/dev/ref/settings/#static-url -STATIC_URL = '/static/' +STATIC_URL = "/static/" # https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#std:setting-STATICFILES_DIRS -STATICFILES_DIRS = [ - str(APPS_DIR.path('static')), -] +STATICFILES_DIRS = [str(APPS_DIR.path("static"))] # https://docs.djangoproject.com/en/dev/ref/contrib/staticfiles/#staticfiles-finders STATICFILES_FINDERS = [ - 
'django.contrib.staticfiles.finders.FileSystemFinder', - 'django.contrib.staticfiles.finders.AppDirectoriesFinder', + "django.contrib.staticfiles.finders.FileSystemFinder", + "django.contrib.staticfiles.finders.AppDirectoriesFinder", ] # MEDIA # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#media-root -MEDIA_ROOT = str(APPS_DIR('media')) +MEDIA_ROOT = str(APPS_DIR("media")) # https://docs.djangoproject.com/en/dev/ref/settings/#media-url -MEDIA_URL = '/media/' +MEDIA_URL = "/media/" # TEMPLATES # ------------------------------------------------------------------------------ @@ -171,44 +161,40 @@ MEDIA_URL = '/media/' TEMPLATES = [ { # https://docs.djangoproject.com/en/dev/ref/settings/#std:setting-TEMPLATES-BACKEND - 'BACKEND': 'django.template.backends.django.DjangoTemplates', + "BACKEND": "django.template.backends.django.DjangoTemplates", # https://docs.djangoproject.com/en/dev/ref/settings/#template-dirs - 'DIRS': [ - str(APPS_DIR.path('templates')), - ], - 'OPTIONS': { + "DIRS": [str(APPS_DIR.path("templates"))], + "OPTIONS": { # https://docs.djangoproject.com/en/dev/ref/settings/#template-debug - 'debug': DEBUG, + "debug": DEBUG, # https://docs.djangoproject.com/en/dev/ref/settings/#template-loaders # https://docs.djangoproject.com/en/dev/ref/templates/api/#loader-types - 'loaders': [ - 'django.template.loaders.filesystem.Loader', - 'django.template.loaders.app_directories.Loader', + "loaders": [ + "django.template.loaders.filesystem.Loader", + "django.template.loaders.app_directories.Loader", ], # https://docs.djangoproject.com/en/dev/ref/settings/#template-context-processors - 'context_processors': [ - 'django.template.context_processors.debug', - 'django.template.context_processors.request', - 'django.contrib.auth.context_processors.auth', - 'django.template.context_processors.i18n', - 'django.template.context_processors.media', - 'django.template.context_processors.static', - 'django.template.context_processors.tz', - 'django.contrib.messages.context_processors.messages', - '{{cookiecutter.project_slug}}.users.context_processors.allauth_settings', + "context_processors": [ + "django.template.context_processors.debug", + "django.template.context_processors.request", + "django.contrib.auth.context_processors.auth", + "django.template.context_processors.i18n", + "django.template.context_processors.media", + "django.template.context_processors.static", + "django.template.context_processors.tz", + "django.contrib.messages.context_processors.messages", + "{{cookiecutter.project_slug}}.users.context_processors.allauth_settings", ], }, - }, + } ] # http://django-crispy-forms.readthedocs.io/en/latest/install.html#template-packs -CRISPY_TEMPLATE_PACK = 'bootstrap4' +CRISPY_TEMPLATE_PACK = "bootstrap4" # FIXTURES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#fixture-dirs -FIXTURE_DIRS = ( - str(APPS_DIR.path('fixtures')), -) +FIXTURE_DIRS = (str(APPS_DIR.path("fixtures")),) # SECURITY # ------------------------------------------------------------------------------ @@ -219,41 +205,41 @@ CSRF_COOKIE_HTTPONLY = True # https://docs.djangoproject.com/en/dev/ref/settings/#secure-browser-xss-filter SECURE_BROWSER_XSS_FILTER = True # https://docs.djangoproject.com/en/dev/ref/settings/#x-frame-options -X_FRAME_OPTIONS = 'DENY' +X_FRAME_OPTIONS = "DENY" # EMAIL # 
------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#email-backend -EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.smtp.EmailBackend') +EMAIL_BACKEND = env( + "DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.smtp.EmailBackend" +) # ADMIN # ------------------------------------------------------------------------------ # Django Admin URL. -ADMIN_URL = 'admin/' +ADMIN_URL = "admin/" # https://docs.djangoproject.com/en/dev/ref/settings/#admins -ADMINS = [ - ("""{{cookiecutter.author_name}}""", '{{cookiecutter.email}}'), -] +ADMINS = [("""{{cookiecutter.author_name}}""", "{{cookiecutter.email}}")] # https://docs.djangoproject.com/en/dev/ref/settings/#managers MANAGERS = ADMINS {% if cookiecutter.use_celery == 'y' -%} # Celery # ------------------------------------------------------------------------------ -INSTALLED_APPS += ['{{cookiecutter.project_slug}}.taskapp.celery.CeleryAppConfig'] +INSTALLED_APPS += ["{{cookiecutter.project_slug}}.taskapp.celery.CeleryAppConfig"] if USE_TZ: # http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-timezone CELERY_TIMEZONE = TIME_ZONE # http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-broker_url -CELERY_BROKER_URL = env('CELERY_BROKER_URL') +CELERY_BROKER_URL = env("CELERY_BROKER_URL") # http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_backend CELERY_RESULT_BACKEND = CELERY_BROKER_URL # http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-accept_content -CELERY_ACCEPT_CONTENT = ['json'] +CELERY_ACCEPT_CONTENT = ["json"] # http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-task_serializer -CELERY_TASK_SERIALIZER = 'json' +CELERY_TASK_SERIALIZER = "json" # http://docs.celeryproject.org/en/latest/userguide/configuration.html#std:setting-result_serializer -CELERY_RESULT_SERIALIZER = 'json' +CELERY_RESULT_SERIALIZER = "json" # http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-time-limit # TODO: set to whatever value is adequate in your circumstances CELERYD_TASK_TIME_LIMIT = 5 * 60 @@ -264,24 +250,24 @@ CELERYD_TASK_SOFT_TIME_LIMIT = 60 {%- endif %} # django-allauth # ------------------------------------------------------------------------------ -ACCOUNT_ALLOW_REGISTRATION = env.bool('DJANGO_ACCOUNT_ALLOW_REGISTRATION', True) +ACCOUNT_ALLOW_REGISTRATION = env.bool("DJANGO_ACCOUNT_ALLOW_REGISTRATION", True) # https://django-allauth.readthedocs.io/en/latest/configuration.html -ACCOUNT_AUTHENTICATION_METHOD = 'username' +ACCOUNT_AUTHENTICATION_METHOD = "username" # https://django-allauth.readthedocs.io/en/latest/configuration.html ACCOUNT_EMAIL_REQUIRED = True # https://django-allauth.readthedocs.io/en/latest/configuration.html -ACCOUNT_EMAIL_VERIFICATION = 'mandatory' +ACCOUNT_EMAIL_VERIFICATION = "mandatory" # https://django-allauth.readthedocs.io/en/latest/configuration.html -ACCOUNT_ADAPTER = '{{cookiecutter.project_slug}}.users.adapters.AccountAdapter' +ACCOUNT_ADAPTER = "{{cookiecutter.project_slug}}.users.adapters.AccountAdapter" # https://django-allauth.readthedocs.io/en/latest/configuration.html -SOCIALACCOUNT_ADAPTER = '{{cookiecutter.project_slug}}.users.adapters.SocialAccountAdapter' +SOCIALACCOUNT_ADAPTER = "{{cookiecutter.project_slug}}.users.adapters.SocialAccountAdapter" {% if cookiecutter.use_compressor == 'y' -%} # django-compressor # 
------------------------------------------------------------------------------ # https://django-compressor.readthedocs.io/en/latest/quickstart/#installation -INSTALLED_APPS += ['compressor'] -STATICFILES_FINDERS += ['compressor.finders.CompressorFinder'] +INSTALLED_APPS += ["compressor"] +STATICFILES_FINDERS += ["compressor.finders.CompressorFinder"] {%- endif %} # Your stuff... diff --git a/{{cookiecutter.project_slug}}/config/settings/local.py b/{{cookiecutter.project_slug}}/config/settings/local.py index 741f324af..0c0588bd4 100644 --- a/{{cookiecutter.project_slug}}/config/settings/local.py +++ b/{{cookiecutter.project_slug}}/config/settings/local.py @@ -6,42 +6,43 @@ from .base import env # https://docs.djangoproject.com/en/dev/ref/settings/#debug DEBUG = True # https://docs.djangoproject.com/en/dev/ref/settings/#secret-key -SECRET_KEY = env('DJANGO_SECRET_KEY', default='!!!SET DJANGO_SECRET_KEY!!!') +SECRET_KEY = env( + "DJANGO_SECRET_KEY", + default="!!!SET DJANGO_SECRET_KEY!!!", +) # https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts -ALLOWED_HOSTS = [ - "localhost", - "0.0.0.0", - "127.0.0.1", -] +ALLOWED_HOSTS = ["localhost", "0.0.0.0", "127.0.0.1"] # CACHES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#caches CACHES = { - 'default': { - 'BACKEND': 'django.core.cache.backends.locmem.LocMemCache', - 'LOCATION': '' + "default": { + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + "LOCATION": "", } } # TEMPLATES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#templates -TEMPLATES[0]['OPTIONS']['debug'] = DEBUG # noqa F405 +TEMPLATES[0]["OPTIONS"]["debug"] = DEBUG # noqa F405 # EMAIL # ------------------------------------------------------------------------------ {% if cookiecutter.use_mailhog == 'y' and cookiecutter.use_docker == 'y' -%} # https://docs.djangoproject.com/en/dev/ref/settings/#email-host -EMAIL_HOST = env('EMAIL_HOST', default='mailhog') +EMAIL_HOST = env("EMAIL_HOST", default="mailhog") {%- elif cookiecutter.use_mailhog == 'y' and cookiecutter.use_docker == 'n' -%} # https://docs.djangoproject.com/en/dev/ref/settings/#email-host -EMAIL_HOST = 'localhost' +EMAIL_HOST = "localhost" {%- else -%} # https://docs.djangoproject.com/en/dev/ref/settings/#email-backend -EMAIL_BACKEND = env('DJANGO_EMAIL_BACKEND', default='django.core.mail.backends.console.EmailBackend') +EMAIL_BACKEND = env( + "DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.console.EmailBackend" +) # https://docs.djangoproject.com/en/dev/ref/settings/#email-host -EMAIL_HOST = 'localhost' +EMAIL_HOST = "localhost" {%- endif %} # https://docs.djangoproject.com/en/dev/ref/settings/#email-port EMAIL_PORT = 1025 @@ -49,35 +50,36 @@ EMAIL_PORT = 1025 # django-debug-toolbar # ------------------------------------------------------------------------------ # https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#prerequisites -INSTALLED_APPS += ['debug_toolbar'] # noqa F405 +INSTALLED_APPS += ["debug_toolbar"] # noqa F405 # https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#middleware -MIDDLEWARE += ['debug_toolbar.middleware.DebugToolbarMiddleware'] # noqa F405 +MIDDLEWARE += ["debug_toolbar.middleware.DebugToolbarMiddleware"] # noqa F405 # https://django-debug-toolbar.readthedocs.io/en/latest/configuration.html#debug-toolbar-config DEBUG_TOOLBAR_CONFIG = { - 
'DISABLE_PANELS': [ - 'debug_toolbar.panels.redirects.RedirectsPanel', - ], - 'SHOW_TEMPLATE_CONTEXT': True, + "DISABLE_PANELS": ["debug_toolbar.panels.redirects.RedirectsPanel"], + "SHOW_TEMPLATE_CONTEXT": True, } # https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#internal-ips -INTERNAL_IPS = ['127.0.0.1', '10.0.2.2'] +INTERNAL_IPS = ["127.0.0.1", "10.0.2.2"] {% if cookiecutter.use_docker == 'y' -%} -if env('USE_DOCKER') == 'yes': +if env("USE_DOCKER") == "yes": import socket + hostname, _, ips = socket.gethostbyname_ex(socket.gethostname()) - INTERNAL_IPS += [ip[:-1] + '1' for ip in ips] + INTERNAL_IPS += [ip[:-1] + "1" for ip in ips] {%- endif %} # django-extensions # ------------------------------------------------------------------------------ # https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration -INSTALLED_APPS += ['django_extensions'] # noqa F405 +INSTALLED_APPS += ["django_extensions"] # noqa F405 {% if cookiecutter.use_celery == 'y' -%} # Celery # ------------------------------------------------------------------------------ +{% if cookiecutter.use_docker == 'n' -%} # http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-always-eager CELERY_TASK_ALWAYS_EAGER = True +{%- endif %} # http://docs.celeryproject.org/en/latest/userguide/configuration.html#task-eager-propagates CELERY_TASK_EAGER_PROPAGATES = True diff --git a/{{cookiecutter.project_slug}}/config/settings/production.py b/{{cookiecutter.project_slug}}/config/settings/production.py index e77d4304c..4adbdb9c4 100644 --- a/{{cookiecutter.project_slug}}/config/settings/production.py +++ b/{{cookiecutter.project_slug}}/config/settings/production.py @@ -1,6 +1,14 @@ {% if cookiecutter.use_sentry == 'y' -%} import logging +import sentry_sdk + +from sentry_sdk.integrations.django import DjangoIntegration +from sentry_sdk.integrations.logging import LoggingIntegration +{%- if cookiecutter.use_celery == 'y' %} +from sentry_sdk.integrations.celery import CeleryIntegration +{% endif %} + {% endif -%} from .base import * # noqa from .base import env @@ -8,37 +16,37 @@ from .base import env # GENERAL # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#secret-key -SECRET_KEY = env('DJANGO_SECRET_KEY') +SECRET_KEY = env("DJANGO_SECRET_KEY") # https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts -ALLOWED_HOSTS = env.list('DJANGO_ALLOWED_HOSTS', default=['{{ cookiecutter.domain_name }}']) +ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["{{ cookiecutter.domain_name }}"]) # DATABASES # ------------------------------------------------------------------------------ -DATABASES['default'] = env.db('DATABASE_URL') # noqa F405 -DATABASES['default']['ATOMIC_REQUESTS'] = True # noqa F405 -DATABASES['default']['CONN_MAX_AGE'] = env.int('CONN_MAX_AGE', default=60) # noqa F405 +DATABASES["default"] = env.db("DATABASE_URL") # noqa F405 +DATABASES["default"]["ATOMIC_REQUESTS"] = True # noqa F405 +DATABASES["default"]["CONN_MAX_AGE"] = env.int("CONN_MAX_AGE", default=60) # noqa F405 # CACHES # ------------------------------------------------------------------------------ CACHES = { - 'default': { - 'BACKEND': 'django_redis.cache.RedisCache', - 'LOCATION': env('REDIS_URL'), - 'OPTIONS': { - 'CLIENT_CLASS': 'django_redis.client.DefaultClient', + "default": { + "BACKEND": "django_redis.cache.RedisCache", + "LOCATION": env("REDIS_URL"), + "OPTIONS": { + "CLIENT_CLASS": 
"django_redis.client.DefaultClient", # Mimicing memcache behavior. # http://niwinz.github.io/django-redis/latest/#_memcached_exceptions_behavior - 'IGNORE_EXCEPTIONS': True, - } + "IGNORE_EXCEPTIONS": True, + }, } } # SECURITY # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#secure-proxy-ssl-header -SECURE_PROXY_SSL_HEADER = ('HTTP_X_FORWARDED_PROTO', 'https') +SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https") # https://docs.djangoproject.com/en/dev/ref/settings/#secure-ssl-redirect -SECURE_SSL_REDIRECT = env.bool('DJANGO_SECURE_SSL_REDIRECT', default=True) +SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True) # https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-secure SESSION_COOKIE_SECURE = True # https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-secure @@ -48,249 +56,247 @@ CSRF_COOKIE_SECURE = True # TODO: set this to 60 seconds first and then to 518400 once you prove the former works SECURE_HSTS_SECONDS = 60 # https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-include-subdomains -SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool('DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS', default=True) +SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool( + "DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True +) # https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-preload -SECURE_HSTS_PRELOAD = env.bool('DJANGO_SECURE_HSTS_PRELOAD', default=True) +SECURE_HSTS_PRELOAD = env.bool("DJANGO_SECURE_HSTS_PRELOAD", default=True) # https://docs.djangoproject.com/en/dev/ref/middleware/#x-content-type-options-nosniff -SECURE_CONTENT_TYPE_NOSNIFF = env.bool('DJANGO_SECURE_CONTENT_TYPE_NOSNIFF', default=True) +SECURE_CONTENT_TYPE_NOSNIFF = env.bool( + "DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True +) # STORAGES # ------------------------------------------------------------------------------ # https://django-storages.readthedocs.io/en/latest/#installation -INSTALLED_APPS += ['storages'] # noqa F405 +INSTALLED_APPS += ["storages"] # noqa F405 +{% if cookiecutter.cloud_provider == 'AWS' %} # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings -AWS_ACCESS_KEY_ID = env('DJANGO_AWS_ACCESS_KEY_ID') +AWS_ACCESS_KEY_ID = env("DJANGO_AWS_ACCESS_KEY_ID") # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings -AWS_SECRET_ACCESS_KEY = env('DJANGO_AWS_SECRET_ACCESS_KEY') +AWS_SECRET_ACCESS_KEY = env("DJANGO_AWS_SECRET_ACCESS_KEY") # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings -AWS_STORAGE_BUCKET_NAME = env('DJANGO_AWS_STORAGE_BUCKET_NAME') +AWS_STORAGE_BUCKET_NAME = env("DJANGO_AWS_STORAGE_BUCKET_NAME") # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings AWS_QUERYSTRING_AUTH = False # DO NOT change these unless you know what you're doing. 
_AWS_EXPIRY = 60 * 60 * 24 * 7 # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings AWS_S3_OBJECT_PARAMETERS = { - 'CacheControl': f'max-age={_AWS_EXPIRY}, s-maxage={_AWS_EXPIRY}, must-revalidate', + "CacheControl": f"max-age={_AWS_EXPIRY}, s-maxage={_AWS_EXPIRY}, must-revalidate" } +# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings +AWS_DEFAULT_ACL = None +# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings +AWS_S3_REGION_NAME = env("DJANGO_AWS_S3_REGION_NAME", default=None) +{% elif cookiecutter.cloud_provider == 'GCE' %} +DEFAULT_FILE_STORAGE = "storages.backends.gcloud.GoogleCloudStorage" +GS_BUCKET_NAME = env("DJANGO_GCE_STORAGE_BUCKET_NAME") +GS_DEFAULT_ACL = "publicRead" +{% endif %} # STATIC # ------------------------ {% if cookiecutter.use_whitenoise == 'y' -%} -STATICFILES_STORAGE = 'whitenoise.storage.CompressedManifestStaticFilesStorage' -{%- else %} -STATICFILES_STORAGE = 'config.settings.production.StaticRootS3Boto3Storage' -STATIC_URL = f'https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/static/' +STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage" +{%- endif -%} +{%- if cookiecutter.cloud_provider == 'AWS' %} +STATICFILES_STORAGE = "config.settings.production.StaticRootS3Boto3Storage" +STATIC_URL = f"https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/static/" +{%- elif cookiecutter.cloud_provider == 'GCE' %} +STATIC_URL = "https://storage.googleapis.com/{}/static".format(GS_BUCKET_NAME) {%- endif %} # MEDIA # ------------------------------------------------------------------------------ -{% if cookiecutter.use_whitenoise == 'y' -%} -DEFAULT_FILE_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' -MEDIA_URL = f'https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/' -{%- else %} +{%- if cookiecutter.cloud_provider == 'AWS' %} # region http://stackoverflow.com/questions/10390244/ # Full-fledged class: https://stackoverflow.com/a/18046120/104731 from storages.backends.s3boto3 import S3Boto3Storage # noqa E402 class StaticRootS3Boto3Storage(S3Boto3Storage): - location = 'static' + location = "static" class MediaRootS3Boto3Storage(S3Boto3Storage): - location = 'media' + location = "media" file_overwrite = False # endregion -DEFAULT_FILE_STORAGE = 'config.settings.production.MediaRootS3Boto3Storage' -MEDIA_URL = f'https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/media/' +DEFAULT_FILE_STORAGE = "config.settings.production.MediaRootS3Boto3Storage" +MEDIA_URL = f"https://{AWS_STORAGE_BUCKET_NAME}.s3.amazonaws.com/media/" +{%- elif cookiecutter.cloud_provider == 'GCE' %} +MEDIA_URL = "https://storage.googleapis.com/{}/media".format(GS_BUCKET_NAME) +MEDIA_ROOT = "https://storage.googleapis.com/{}/media".format(GS_BUCKET_NAME) {%- endif %} # TEMPLATES # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#templates -TEMPLATES[0]['OPTIONS']['loaders'] = [ # noqa F405 +TEMPLATES[0]["OPTIONS"]["loaders"] = [ # noqa F405 ( - 'django.template.loaders.cached.Loader', + "django.template.loaders.cached.Loader", [ - 'django.template.loaders.filesystem.Loader', - 'django.template.loaders.app_directories.Loader', - ] - ), + "django.template.loaders.filesystem.Loader", + "django.template.loaders.app_directories.Loader", + ], + ) ] # EMAIL # ------------------------------------------------------------------------------ # https://docs.djangoproject.com/en/dev/ref/settings/#default-from-email
DEFAULT_FROM_EMAIL = env( - 'DJANGO_DEFAULT_FROM_EMAIL', - default='{{cookiecutter.project_name}} ' + "DJANGO_DEFAULT_FROM_EMAIL", default="{{cookiecutter.project_name}} " ) # https://docs.djangoproject.com/en/dev/ref/settings/#server-email -SERVER_EMAIL = env('DJANGO_SERVER_EMAIL', default=DEFAULT_FROM_EMAIL) +SERVER_EMAIL = env("DJANGO_SERVER_EMAIL", default=DEFAULT_FROM_EMAIL) # https://docs.djangoproject.com/en/dev/ref/settings/#email-subject-prefix -EMAIL_SUBJECT_PREFIX = env('DJANGO_EMAIL_SUBJECT_PREFIX', default='[{{cookiecutter.project_name}}]') +EMAIL_SUBJECT_PREFIX = env( + "DJANGO_EMAIL_SUBJECT_PREFIX", default="[{{cookiecutter.project_name}}]" +) # ADMIN # ------------------------------------------------------------------------------ # Django Admin URL regex. -ADMIN_URL = env('DJANGO_ADMIN_URL') +ADMIN_URL = env("DJANGO_ADMIN_URL") # Anymail (Mailgun) # ------------------------------------------------------------------------------ # https://anymail.readthedocs.io/en/stable/installation/#installing-anymail -INSTALLED_APPS += ['anymail'] # noqa F405 -EMAIL_BACKEND = 'anymail.backends.mailgun.EmailBackend' +INSTALLED_APPS += ["anymail"] # noqa F405 +EMAIL_BACKEND = "anymail.backends.mailgun.EmailBackend" # https://anymail.readthedocs.io/en/stable/installation/#anymail-settings-reference ANYMAIL = { - 'MAILGUN_API_KEY': env('MAILGUN_API_KEY'), - 'MAILGUN_SENDER_DOMAIN': env('MAILGUN_DOMAIN') + "MAILGUN_API_KEY": env("MAILGUN_API_KEY"), + "MAILGUN_SENDER_DOMAIN": env("MAILGUN_DOMAIN"), } # Gunicorn # ------------------------------------------------------------------------------ -INSTALLED_APPS += ['gunicorn'] # noqa F405 +INSTALLED_APPS += ["gunicorn"] # noqa F405 {% if cookiecutter.use_whitenoise == 'y' -%} # WhiteNoise # ------------------------------------------------------------------------------ # http://whitenoise.evans.io/en/latest/django.html#enable-whitenoise -MIDDLEWARE.insert(1, 'whitenoise.middleware.WhiteNoiseMiddleware') # noqa F405 +MIDDLEWARE.insert(1, "whitenoise.middleware.WhiteNoiseMiddleware") # noqa F405 {% endif %} {%- if cookiecutter.use_compressor == 'y' -%} # django-compressor # ------------------------------------------------------------------------------ # https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_ENABLED -COMPRESS_ENABLED = env.bool('COMPRESS_ENABLED', default=True) +COMPRESS_ENABLED = env.bool("COMPRESS_ENABLED", default=True) # https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_STORAGE -COMPRESS_STORAGE = 'storages.backends.s3boto3.S3Boto3Storage' +COMPRESS_STORAGE = "storages.backends.s3boto3.S3Boto3Storage" # https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_URL -COMPRESS_URL = STATIC_URL - +COMPRESS_URL = STATIC_URL{% if cookiecutter.use_whitenoise == 'y' %} # noqa F405{% endif %} {% endif %} {%- if cookiecutter.use_whitenoise == 'n' -%} # Collectfast # ------------------------------------------------------------------------------ # https://github.com/antonagestam/collectfast#installation -INSTALLED_APPS = ['collectfast'] + INSTALLED_APPS # noqa F405 +INSTALLED_APPS = ["collectfast"] + INSTALLED_APPS # noqa F405 AWS_PRELOAD_METADATA = True - {% endif %} -{%- if cookiecutter.use_sentry == 'y' -%} -# raven -# ------------------------------------------------------------------------------ -# https://docs.sentry.io/clients/python/integrations/django/ -INSTALLED_APPS += ['raven.contrib.django.raven_compat'] # noqa F405 
-MIDDLEWARE = ['raven.contrib.django.raven_compat.middleware.SentryResponseErrorIdMiddleware'] + MIDDLEWARE - -# Sentry -# ------------------------------------------------------------------------------ -SENTRY_DSN = env('SENTRY_DSN') -SENTRY_CLIENT = env('DJANGO_SENTRY_CLIENT', default='raven.contrib.django.raven_compat.DjangoClient') -LOGGING = { - 'version': 1, - 'disable_existing_loggers': True, - 'root': { - 'level': 'WARNING', - 'handlers': ['sentry'], - }, - 'formatters': { - 'verbose': { - 'format': '%(levelname)s %(asctime)s %(module)s ' - '%(process)d %(thread)d %(message)s' - }, - }, - 'handlers': { - 'sentry': { - 'level': 'ERROR', - 'class': 'raven.contrib.django.raven_compat.handlers.SentryHandler', - }, - 'console': { - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'verbose' - } - }, - 'loggers': { - 'django.db.backends': { - 'level': 'ERROR', - 'handlers': ['console'], - 'propagate': False, - }, - 'raven': { - 'level': 'DEBUG', - 'handlers': ['console'], - 'propagate': False, - }, - 'sentry.errors': { - 'level': 'DEBUG', - 'handlers': ['console'], - 'propagate': False, - }, - 'django.security.DisallowedHost': { - 'level': 'ERROR', - 'handlers': ['console', 'sentry'], - 'propagate': False, - }, - }, -} - -SENTRY_CELERY_LOGLEVEL = env.int('DJANGO_SENTRY_LOG_LEVEL', logging.INFO) -RAVEN_CONFIG = { - 'dsn': SENTRY_DSN -} - -{%- else %} # LOGGING # ------------------------------------------------------------------------------ -# See: https://docs.djangoproject.com/en/dev/ref/settings/#logging +# https://docs.djangoproject.com/en/dev/ref/settings/#logging +# See https://docs.djangoproject.com/en/dev/topics/logging for +# more details on how to customize your logging configuration. +{% if cookiecutter.use_sentry == 'n' -%} # A sample logging configuration. The only tangible logging # performed by this configuration is to send an email to # the site admins on every HTTP 500 error when DEBUG=False. -# See https://docs.djangoproject.com/en/dev/topics/logging for -# more details on how to customize your logging configuration. 
LOGGING = { - 'version': 1, - 'disable_existing_loggers': False, - 'filters': { - 'require_debug_false': { - '()': 'django.utils.log.RequireDebugFalse' + "version": 1, + "disable_existing_loggers": False, + "filters": {"require_debug_false": {"()": "django.utils.log.RequireDebugFalse"}}, + "formatters": { + "verbose": { + "format": "%(levelname)s %(asctime)s %(module)s " + "%(process)d %(thread)d %(message)s" } }, - 'formatters': { - 'verbose': { - 'format': '%(levelname)s %(asctime)s %(module)s ' - '%(process)d %(thread)d %(message)s' + "handlers": { + "mail_admins": { + "level": "ERROR", + "filters": ["require_debug_false"], + "class": "django.utils.log.AdminEmailHandler", + }, + "console": { + "level": "DEBUG", + "class": "logging.StreamHandler", + "formatter": "verbose", }, }, - 'handlers': { - 'mail_admins': { - 'level': 'ERROR', - 'filters': ['require_debug_false'], - 'class': 'django.utils.log.AdminEmailHandler' + "loggers": { + "django.request": { + "handlers": ["mail_admins"], + "level": "ERROR", + "propagate": True, }, - 'console': { - 'level': 'DEBUG', - 'class': 'logging.StreamHandler', - 'formatter': 'verbose', + "django.security.DisallowedHost": { + "level": "ERROR", + "handlers": ["console", "mail_admins"], + "propagate": True, }, }, - 'loggers': { - 'django.request': { - 'handlers': ['mail_admins'], - 'level': 'ERROR', - 'propagate': True - }, - 'django.security.DisallowedHost': { - 'level': 'ERROR', - 'handlers': ['console', 'mail_admins'], - 'propagate': True +} +{% else %} +LOGGING = { + "version": 1, + "disable_existing_loggers": True, + "formatters": { + "verbose": { + "format": "%(levelname)s %(asctime)s %(module)s " + "%(process)d %(thread)d %(message)s" } - } + }, + "handlers": { + "console": { + "level": "DEBUG", + "class": "logging.StreamHandler", + "formatter": "verbose", + } + }, + "loggers": { + "django.db.backends": { + "level": "ERROR", + "handlers": ["console"], + "propagate": False, + }, + # Errors logged by the SDK itself + "sentry_sdk": {"level": "ERROR", "handlers": ["console"], "propagate": False}, + "django.security.DisallowedHost": { + "level": "ERROR", + "handlers": ["console"], + "propagate": False, + }, + }, } +# Sentry +# ------------------------------------------------------------------------------ +SENTRY_DSN = env("SENTRY_DSN") +SENTRY_LOG_LEVEL = env.int("DJANGO_SENTRY_LOG_LEVEL", logging.INFO) + +sentry_logging = LoggingIntegration( + level=SENTRY_LOG_LEVEL, # Capture info and above as breadcrumbs + event_level=None, # Send no events from log messages +) + +{%- if cookiecutter.use_celery == 'y' %} +sentry_sdk.init( + dsn=SENTRY_DSN, + integrations=[sentry_logging, DjangoIntegration(), CeleryIntegration()], +) +{% else %} +sentry_sdk.init(dsn=SENTRY_DSN, integrations=[sentry_logging, DjangoIntegration()]) +{% endif -%} {% endif %} # Your stuff... 
# ------------------------------------------------------------------------------ diff --git a/{{cookiecutter.project_slug}}/config/settings/test.py b/{{cookiecutter.project_slug}}/config/settings/test.py index e947d4101..2ade134a2 100644 --- a/{{cookiecutter.project_slug}}/config/settings/test.py +++ b/{{cookiecutter.project_slug}}/config/settings/test.py @@ -10,7 +10,10 @@ from .base import env # https://docs.djangoproject.com/en/dev/ref/settings/#debug DEBUG = False # https://docs.djangoproject.com/en/dev/ref/settings/#secret-key -SECRET_KEY = env("DJANGO_SECRET_KEY", default="!!!SET DJANGO_SECRET_KEY!!!") +SECRET_KEY = env( + "DJANGO_SECRET_KEY", + default="!!!SET DJANGO_SECRET_KEY!!!", +) # https://docs.djangoproject.com/en/dev/ref/settings/#test-runner TEST_RUNNER = "django.test.runner.DiscoverRunner" @@ -19,7 +22,8 @@ TEST_RUNNER = "django.test.runner.DiscoverRunner" # https://docs.djangoproject.com/en/dev/ref/settings/#caches CACHES = { "default": { - "BACKEND": "django.core.cache.backends.locmem.LocMemCache", "LOCATION": "" + "BACKEND": "django.core.cache.backends.locmem.LocMemCache", + "LOCATION": "", } } diff --git a/{{cookiecutter.project_slug}}/config/urls.py b/{{cookiecutter.project_slug}}/config/urls.py index 62d86c94d..909d5e86e 100644 --- a/{{cookiecutter.project_slug}}/config/urls.py +++ b/{{cookiecutter.project_slug}}/config/urls.py @@ -8,22 +8,15 @@ from django.views import defaults as default_views urlpatterns = [ path("", TemplateView.as_view(template_name="pages/home.html"), name="home"), path( - "about/", - TemplateView.as_view(template_name="pages/about.html"), - name="about", + "about/", TemplateView.as_view(template_name="pages/about.html"), name="about" ), # Django Admin, use {% raw %}{% url 'admin:index' %}{% endraw %} path(settings.ADMIN_URL, admin.site.urls), # User management - path( - "users/", - include("{{ cookiecutter.project_slug }}.users.urls", namespace="users"), - ), + path("users/", include("{{ cookiecutter.project_slug }}.users.urls", namespace="users")), path("accounts/", include("allauth.urls")), # Your stuff: custom urls includes go here -] + static( - settings.MEDIA_URL, document_root=settings.MEDIA_ROOT -) +] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT) if settings.DEBUG: # This allows the error pages to be debugged during development, just visit diff --git a/{{cookiecutter.project_slug}}/config/wsgi.py b/{{cookiecutter.project_slug}}/config/wsgi.py index 4c1350061..1899a30ca 100644 --- a/{{cookiecutter.project_slug}}/config/wsgi.py +++ b/{{cookiecutter.project_slug}}/config/wsgi.py @@ -20,13 +20,10 @@ from django.core.wsgi import get_wsgi_application # This allows easy placement of apps within the interior # {{ cookiecutter.project_slug }} directory. -app_path = os.path.abspath(os.path.join( - os.path.dirname(os.path.abspath(__file__)), os.pardir)) -sys.path.append(os.path.join(app_path, '{{ cookiecutter.project_slug }}')) -{% if cookiecutter.use_sentry == 'y' -%} -if os.environ.get('DJANGO_SETTINGS_MODULE') == 'config.settings.production': - from raven.contrib.django.raven_compat.middleware.wsgi import Sentry -{%- endif %} +app_path = os.path.abspath( + os.path.join(os.path.dirname(os.path.abspath(__file__)), os.pardir) +) +sys.path.append(os.path.join(app_path, "{{ cookiecutter.project_slug }}")) # We defer to a DJANGO_SETTINGS_MODULE already in the environment. This breaks # if running multiple sites in the same mod_wsgi process. 
To fix this, use # mod_wsgi daemon mode with each site in its own daemon process, or use @@ -37,10 +34,6 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.production") # file. This includes Django's development server, if the WSGI_APPLICATION # setting points here. application = get_wsgi_application() -{% if cookiecutter.use_sentry == 'y' -%} -if os.environ.get('DJANGO_SETTINGS_MODULE') == 'config.settings.production': - application = Sentry(application) -{%- endif %} # Apply WSGI middleware here. # from helloworld.wsgi import HelloWorldApplication # application = HelloWorldApplication(application) diff --git a/{{cookiecutter.project_slug}}/docs/deploy.rst b/{{cookiecutter.project_slug}}/docs/deploy.rst deleted file mode 100644 index 1e642c798..000000000 --- a/{{cookiecutter.project_slug}}/docs/deploy.rst +++ /dev/null @@ -1,4 +0,0 @@ -Deploy -======== - -This is where you describe how the project is deployed in production. diff --git a/{{cookiecutter.project_slug}}/docs/docker_ec2.rst b/{{cookiecutter.project_slug}}/docs/docker_ec2.rst deleted file mode 100644 index 606d29563..000000000 --- a/{{cookiecutter.project_slug}}/docs/docker_ec2.rst +++ /dev/null @@ -1,186 +0,0 @@ -Developing with Docker -====================== - -You can develop your application in a `Docker`_ container for simpler deployment onto bare Linux machines later. This instruction assumes an `Amazon Web Services`_ EC2 instance, but it should work on any machine with Docker > 1.3 and `Docker compose`_ installed. - -.. _Docker: https://www.docker.com/ -.. _Amazon Web Services: http://aws.amazon.com/ -.. _Docker compose: https://docs.docker.com/compose/ - -Setting up -^^^^^^^^^^ - -Docker encourages running one container for each process. This might mean one container for your web server, one for Django application and a third for your database. Once you're happy composing containers in this way you can easily add more, such as a `Redis`_ cache. - -.. _Redis: http://redis.io/ - -The Docker compose tool (previously known as `fig`_) makes linking these containers easy. An example set up for your Cookiecutter Django project might look like this: - -.. _fig: http://www.fig.sh/ - -:: - - webapp/ # Your cookiecutter project would be in here - Dockerfile - ... - database/ - Dockerfile - ... - webserver/ - Dockerfile - ... - production.yml - -Each component of your application would get its own `Dockerfile`_. The rest of this example assumes you are using the `base postgres image`_ for your database. Your database settings in `config/base.py` might then look something like: - -.. _Dockerfile: https://docs.docker.com/reference/builder/ -.. _base postgres image: https://registry.hub.docker.com/_/postgres/ - -.. code-block:: python - - DATABASES = { - 'default': { - 'ENGINE': 'django.db.backends.postgresql_psycopg2', - 'NAME': 'postgres', - 'USER': 'postgres', - 'HOST': 'database', - 'PORT': 5432, - } - } - -The `Docker compose documentation`_ explains in detail what you can accomplish in the `production.yml` file, but an example configuration might look like this: - -.. _Docker compose documentation: https://docs.docker.com/compose/#compose-documentation - -.. 
code-block:: yaml - - database: - build: database - webapp: - build: webapp: - command: /usr/bin/python3.6 manage.py runserver 0.0.0.0:8000 # dev setting - # command: gunicorn -b 0.0.0.0:8000 wsgi:application # production setting - volumes: - - webapp/your_project_name:/path/to/container/workdir/ - links: - - database - webserver: - build: webserver - ports: - - "80:80" - - "443:443" - links: - - webapp - -We'll ignore the webserver for now (you'll want to comment that part out while we do). A working Dockerfile to run your cookiecutter application might look like this: - -:: - - FROM ubuntu:14.04 - ENV REFRESHED_AT 2015-01-13 - - # update packages and prepare to build software - RUN ["apt-get", "update"] - RUN ["apt-get", "-y", "install", "build-essential", "vim", "git", "curl"] - RUN ["locale-gen", "en_GB.UTF-8"] - - # install latest python - RUN ["apt-get", "-y", "build-dep", "python3-dev", "python3-imaging"] - RUN ["apt-get", "-y", "install", "python3-dev", "python3-imaging", "python3-pip"] - - # prepare postgreSQL support - RUN ["apt-get", "-y", "build-dep", "python3-psycopg2"] - - # move into our working directory - # ADD must be after chown see http://stackoverflow.com/a/26145444/1281947 - RUN ["groupadd", "python"] - RUN ["useradd", "python", "-s", "/bin/bash", "-m", "-g", "python", "-G", "python"] - ENV HOME /home/python - WORKDIR /home/python - RUN ["chown", "-R", "python:python", "/home/python"] - ADD ./ /home/python - - # manage requirements - ENV REQUIREMENTS_REFRESHED_AT 2015-02-25 - RUN ["pip3", "install", "-r", "requirements.txt"] - - # uncomment the line below to use container as a non-root user - USER python:python - -Running `sudo docker-compose -f production.yml build` will follow the instructions in your `production.yml` file and build the database container, then your webapp, before mounting your cookiecutter project files as a volume in the webapp container and linking to the database. Our example yaml file runs in development mode but changing it to production mode is as simple as commenting out the line using `runserver` and uncommenting the line using `gunicorn`. - -Both are set to run on port `0.0.0.0:8000`, which is where the Docker daemon will discover it. You can now run `sudo docker-compose -f production.yml up` and browse to `localhost:8000` to see your application running. - -Deployment -^^^^^^^^^^ - -You'll need a webserver container for deployment. An example setup for `Nginx`_ might look like this: - -.. _Nginx: http://wiki.nginx.org/Main - -:: - - FROM ubuntu:14.04 - ENV REFRESHED_AT 2015-02-11 - - # get the nginx package and set it up - RUN ["apt-get", "update"] - RUN ["apt-get", "-y", "install", "nginx"] - - # forward request and error logs to docker log collector - RUN ln -sf /dev/stdout /var/log/nginx/access.log - RUN ln -sf /dev/stderr /var/log/nginx/error.log - VOLUME ["/var/cache/nginx"] - EXPOSE 80 443 - - # load nginx conf - ADD ./site.conf /etc/nginx/sites-available/your_cookiecutter_project - RUN ["ln", "-s", "/etc/nginx/sites-available/your_cookiecutter_project", "/etc/nginx/sites-enabled/your_cookiecutter_project"] - RUN ["rm", "-rf", "/etc/nginx/sites-available/default"] - - #start the server - CMD ["nginx", "-g", "daemon off;"] - -That Dockerfile assumes you have an Nginx conf file named `site.conf` in the same directory as the webserver Dockerfile. 
A very basic example, which forwards traffic onto the development server or gunicorn for processing, would look like this: - -:: - - # see http://serverfault.com/questions/577370/how-can-i-use-environment-variables-in-nginx-conf#comment730384_577370 - upstream localhost { - server webapp_1:8000; - } - server { - location / { - proxy_pass http://localhost; - } - } - -Running `sudo docker-compose -f production.yml build webserver` will build your server container. Running `sudo docker-compose -f production.yml up` will now expose your application directly on `localhost` (no need to specify the port number). - -Building and running your app on EC2 -^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^ - -All you now need to do to run your app in production is: - -* Create an empty EC2 Linux instance (any Linux machine should do). - -* Install your preferred source control solution, Docker and Docker compose on the news instance. - -* Pull in your code from source control. The root directory should be the one with your `production.yml` file in it. - -* Run `sudo docker-compose -f production.yml build` and `sudo docker-compose -f production.yml up`. - -* Assign an `Elastic IP address`_ to your new machine. - -.. _Elastic IP address: https://aws.amazon.com/articles/1346 - -* Point your domain name to the elastic IP. - -**Be careful with Elastic IPs** because, on the AWS free tier, if you assign one and then stop the machine you will incur charges while the machine is down (presumably because you're preventing them allocating the IP to someone else). - -Security advisory -^^^^^^^^^^^^^^^^^ - -The setup described in this instruction will get you up-and-running but it hasn't been audited for security. If you are running your own setup like this it is always advisable to, at a minimum, examine your application with a tool like `OWASP ZAP`_ to see what security holes you might be leaving open. - -.. _OWASP ZAP: https://www.owasp.org/index.php/OWASP_Zed_Attack_Proxy_Project diff --git a/{{cookiecutter.project_slug}}/docs/index.rst b/{{cookiecutter.project_slug}}/docs/index.rst index 21ef98ee5..96752d807 100644 --- a/{{cookiecutter.project_slug}}/docs/index.rst +++ b/{{cookiecutter.project_slug}}/docs/index.rst @@ -3,23 +3,17 @@ You can adapt this file completely to your liking, but it should at least contain the root `toctree` directive. -Welcome to {{ cookiecutter.project_name }}'s documentation! +{{ cookiecutter.project_name }} Project Documentation ==================================================================== -Contents: +Table of Contents: .. toctree:: :maxdepth: 2 - install - deploy - docker_ec2 - tests - - -Indices and tables -================== +Indices & Tables +================ * :ref:`genindex` * :ref:`modindex` diff --git a/{{cookiecutter.project_slug}}/docs/install.rst b/{{cookiecutter.project_slug}}/docs/install.rst deleted file mode 100644 index 1bc03335d..000000000 --- a/{{cookiecutter.project_slug}}/docs/install.rst +++ /dev/null @@ -1,4 +0,0 @@ -Install -========= - -This is where you write how to get a new laptop to run this project. 
diff --git a/{{cookiecutter.project_slug}}/gulpfile.js b/{{cookiecutter.project_slug}}/gulpfile.js index bb18a535d..3c28a7359 100644 --- a/{{cookiecutter.project_slug}}/gulpfile.js +++ b/{{cookiecutter.project_slug}}/gulpfile.js @@ -1,63 +1,70 @@ +//////////////////////////////// +// Setup +//////////////////////////////// -//////////////////////////////// - //Setup// -//////////////////////////////// +// Gulp and package +const { src, dest, parallel, series, watch } = require('gulp') +const pjson = require('./package.json') // Plugins -var gulp = require('gulp'), - pjson = require('./package.json'), - gutil = require('gulp-util'), - sass = require('gulp-sass'), - autoprefixer = require('gulp-autoprefixer'), - cssnano = require('gulp-cssnano'), - {% if cookiecutter.custom_bootstrap_compilation == 'y' %} - concat = require('gulp-concat'), - {% endif %} - rename = require('gulp-rename'), - del = require('del'), - plumber = require('gulp-plumber'), - pixrem = require('gulp-pixrem'), - uglify = require('gulp-uglify'), - imagemin = require('gulp-imagemin'), - spawn = require('child_process').spawn, - runSequence = require('run-sequence'), - browserSync = require('browser-sync').create(), - reload = browserSync.reload; - +const autoprefixer = require('autoprefixer') +const browserSync = require('browser-sync').create() +{% if cookiecutter.custom_bootstrap_compilation == 'y' %} +const concat = require('gulp-concat') +{% endif %} +const cssnano = require ('cssnano') +const imagemin = require('gulp-imagemin') +const pixrem = require('pixrem') +const plumber = require('gulp-plumber') +const postcss = require('gulp-postcss') +const reload = browserSync.reload +const rename = require('gulp-rename') +const sass = require('gulp-sass') +const spawn = require('child_process').spawn +const uglify = require('gulp-uglify-es').default // Relative paths function -var pathsConfig = function (appName) { - this.app = "./" + (appName || pjson.name); - var vendorsRoot = 'node_modules/'; +function pathsConfig(appName) { + this.app = `./${pjson.name}` + const vendorsRoot = 'node_modules' return { {% if cookiecutter.custom_bootstrap_compilation == 'y' %} - bootstrapSass: vendorsRoot + '/bootstrap/scss', + bootstrapSass: `${vendorsRoot}/bootstrap/scss`, vendorsJs: [ - vendorsRoot + 'jquery/dist/jquery.slim.js', - vendorsRoot + 'popper.js/dist/umd/popper.js', - vendorsRoot + 'bootstrap/dist/js/bootstrap.js' + `${vendorsRoot}/jquery/dist/jquery.slim.js`, + `${vendorsRoot}/popper.js/dist/umd/popper.js`, + `${vendorsRoot}/bootstrap/dist/js/bootstrap.js`, ], {% endif %} app: this.app, - templates: this.app + '/templates', - css: this.app + '/static/css', - sass: this.app + '/static/sass', - fonts: this.app + '/static/fonts', - images: this.app + '/static/images', - js: this.app + '/static/js' + templates: `${this.app}/templates`, + css: `${this.app}/static/css`, + sass: `${this.app}/static/sass`, + fonts: `${this.app}/static/fonts`, + images: `${this.app}/static/images`, + js: `${this.app}/static/js`, } -}; +} -var paths = pathsConfig(); +var paths = pathsConfig() //////////////////////////////// - //Tasks// +// Tasks //////////////////////////////// // Styles autoprefixing and minification -gulp.task('styles', function() { - return gulp.src(paths.sass + '/project.scss') +function styles() { + var processCss = [ + autoprefixer(), // adds vendor prefixes + pixrem(), // add fallbacks for rem units + ] + + var minifyCss = [ + cssnano({ preset: 'default' }) // minify result + ] + + return src(`${paths.sass}/project.scss`) 
.pipe(sass({ includePaths: [ {% if cookiecutter.custom_bootstrap_compilation == 'y' %} @@ -67,72 +74,104 @@ gulp.task('styles', function() { ] }).on('error', sass.logError)) .pipe(plumber()) // Checks for errors - .pipe(autoprefixer({browsers: ['last 2 versions']})) // Adds vendor prefixes - .pipe(pixrem()) // add fallbacks for rem units - .pipe(gulp.dest(paths.css)) + .pipe(postcss(processCss)) + .pipe(dest(paths.css)) .pipe(rename({ suffix: '.min' })) - .pipe(cssnano()) // Minifies the result - .pipe(gulp.dest(paths.css)); -}); + .pipe(postcss(minifyCss)) // Minifies the result + .pipe(dest(paths.css)) +} // Javascript minification -gulp.task('scripts', function() { - return gulp.src(paths.js + '/project.js') +function scripts() { + return src(`${paths.js}/project.js`) .pipe(plumber()) // Checks for errors .pipe(uglify()) // Minifies the js .pipe(rename({ suffix: '.min' })) - .pipe(gulp.dest(paths.js)); -}); - + .pipe(dest(paths.js)) +} {% if cookiecutter.custom_bootstrap_compilation == 'y' %} // Vendor Javascript minification -gulp.task('vendor-scripts', function() { - return gulp.src(paths.vendorsJs) +function vendorScripts() { + return src(paths.vendorsJs) .pipe(concat('vendors.js')) - .pipe(gulp.dest(paths.js)) + .pipe(dest(paths.js)) .pipe(plumber()) // Checks for errors .pipe(uglify()) // Minifies the js .pipe(rename({ suffix: '.min' })) - .pipe(gulp.dest(paths.js)); -}); + .pipe(dest(paths.js)) +} {% endif %} // Image compression -gulp.task('imgCompression', function(){ - return gulp.src(paths.images + '/*') +function imgCompression() { + return src(`${paths.images}/*`) .pipe(imagemin()) // Compresses PNG, JPEG, GIF and SVG images - .pipe(gulp.dest(paths.images)) -}); + .pipe(dest(paths.images)) +} // Run django server -gulp.task('runServer', function(cb) { - var cmd = spawn('python', ['manage.py', 'runserver'], {stdio: 'inherit'}); +function runServer(cb) { + var cmd = spawn('python', ['manage.py', 'runserver'], {stdio: 'inherit'}) cmd.on('close', function(code) { - console.log('runServer exited with code ' + code); - cb(code); - }); -}); + console.log('runServer exited with code ' + code) + cb(code) + }) +} // Browser sync server for live reload -gulp.task('browserSync', function() { +function initBrowserSync() { browserSync.init( - [paths.css + "/*.css", paths.js + "*.js", paths.templates + '*.html'], { - proxy: "localhost:8000" - }); -}); + [ + `${paths.css}/*.css`, + `${paths.js}/*.js`, + `${paths.templates}/*.html` + ], { + // https://www.browsersync.io/docs/options/#option-proxy + {%- if cookiecutter.use_docker == 'n' %} + proxy: 'localhost:8000' + {% else %} + proxy: { + target: 'django:8000', + proxyReq: [ + function(proxyReq, req) { + // Assign proxy "host" header same as current request at Browsersync server + proxyReq.setHeader('Host', req.headers.host) + } + ] + }, + // https://www.browsersync.io/docs/options/#option-open + // Disable as it doesn't work from inside a container + open: false + {%- endif %} + } + ) +} // Watch -gulp.task('watch', function() { +function watchPaths() { + watch(`${paths.sass}/*.scss`, styles) + watch(`${paths.templates}/**/*.html`).on("change", reload) + watch([`${paths.js}/*.js`, `!${paths.js}/*.min.js`], scripts).on("change", reload) +} - gulp.watch(paths.sass + '/*.scss', ['styles']); - gulp.watch(paths.js + '/*.js', ['scripts']).on("change", reload); - gulp.watch(paths.images + '/*', ['imgCompression']); - gulp.watch(paths.templates + '/**/*.html').on("change", reload); +// Generate all assets +const generateAssets = parallel( + 
styles, + scripts, + {% if cookiecutter.custom_bootstrap_compilation == 'y' %}vendorScripts,{% endif %} + imgCompression +) -}); +// Set up dev environment +const dev = parallel( + {%- if cookiecutter.use_docker == 'n' %} + runServer, + {%- endif %} + initBrowserSync, + watchPaths +) -// Default task -gulp.task('default', function() { - runSequence(['styles', 'scripts', {% if cookiecutter.custom_bootstrap_compilation == 'y' %}'vendor-scripts', {% endif %}'imgCompression'], ['runServer', 'browserSync', 'watch']); -}); +exports.default = series(generateAssets, dev) +exports["generate-assets"] = generateAssets +exports["dev"] = dev diff --git a/{{cookiecutter.project_slug}}/local.yml b/{{cookiecutter.project_slug}}/local.yml index 366389320..c6dd654e5 100644 --- a/{{cookiecutter.project_slug}}/local.yml +++ b/{{cookiecutter.project_slug}}/local.yml @@ -79,3 +79,23 @@ services: command: /start-flower {%- endif %} + {%- if cookiecutter.js_task_runner == 'Gulp' %} + + node: + build: + context: . + dockerfile: ./compose/local/node/Dockerfile + image: {{ cookiecutter.project_slug }}_local_node + depends_on: + - django + volumes: + - .:/app + # http://jdlm.info/articles/2016/03/06/lessons-building-node-app-docker.html + - /app/node_modules + command: npm run dev + ports: + - "3000:3000" + # Expose browsersync UI: https://www.browsersync.io/docs/options/#option-ui + - "3001:3001" + + {%- endif %} diff --git a/{{cookiecutter.project_slug}}/merge_production_dotenvs_in_dotenv.py b/{{cookiecutter.project_slug}}/merge_production_dotenvs_in_dotenv.py index 5c3027a40..4e70e2adb 100644 --- a/{{cookiecutter.project_slug}}/merge_production_dotenvs_in_dotenv.py +++ b/{{cookiecutter.project_slug}}/merge_production_dotenvs_in_dotenv.py @@ -8,7 +8,6 @@ PRODUCTION_DOTENVS_DIR_PATH = os.path.join(ROOT_DIR_PATH, ".envs", ".production" PRODUCTION_DOTENV_FILE_PATHS = [ os.path.join(PRODUCTION_DOTENVS_DIR_PATH, ".django"), os.path.join(PRODUCTION_DOTENVS_DIR_PATH, ".postgres"), - os.path.join(PRODUCTION_DOTENVS_DIR_PATH, ".caddy"), ] DOTENV_FILE_PATH = os.path.join(ROOT_DIR_PATH, ".env") diff --git a/{{cookiecutter.project_slug}}/package.json b/{{cookiecutter.project_slug}}/package.json index b29d52963..6edf2e114 100644 --- a/{{cookiecutter.project_slug}}/package.json +++ b/{{cookiecutter.project_slug}}/package.json @@ -5,36 +5,34 @@ "devDependencies": { {% if cookiecutter.js_task_runner == 'Gulp' -%} {% if cookiecutter.custom_bootstrap_compilation == 'y' -%} - "bootstrap": "4.1.1", - {% endif -%} - "browser-sync": "^2.14.0", - "del": "^2.2.2", - "gulp": "^3.9.1", - "gulp-autoprefixer": "^5.0.0", - {% if cookiecutter.custom_bootstrap_compilation == 'y' -%} + "bootstrap": "4.3.1", "gulp-concat": "^2.6.1", - {% endif -%} - "gulp-cssnano": "^2.1.2", - "gulp-imagemin": "^4.1.0", - "gulp-pixrem": "^1.0.0", - "gulp-plumber": "^1.1.0", - "gulp-rename": "^1.2.2", - "gulp-sass": "^3.1.0", - "gulp-uglify": "^3.0.0", - "gulp-util": "^3.0.7", - {% if cookiecutter.custom_bootstrap_compilation == 'y' -%} "jquery": "3.3.1", "popper.js": "1.14.3", {% endif -%} - "run-sequence": "^2.1.1" + "autoprefixer": "^9.4.7", + "browser-sync": "^2.14.0", + "cssnano": "^4.1.10", + "gulp": "^4.0.0", + "gulp-imagemin": "^5.0.3", + "gulp-plumber": "^1.2.1", + "gulp-postcss": "^8.0.0", + "gulp-rename": "^1.2.2", + "gulp-sass": "^4.0.2", + "gulp-uglify-es": "^1.0.4", + "pixrem": "^5.0.0" {%- endif %} }, "engines": { - "node": ">=0.8.0" + "node": ">=8" }, + "browserslist": [ + "last 2 versions" + ], "scripts": { {% if cookiecutter.js_task_runner == 
'Gulp' -%} - "dev": "gulp" + "dev": "gulp", + "build": "gulp generate-assets" {%- endif %} } } diff --git a/{{cookiecutter.project_slug}}/production.yml b/{{cookiecutter.project_slug}}/production.yml index fd8388acb..a24ba8297 100644 --- a/{{cookiecutter.project_slug}}/production.yml +++ b/{{cookiecutter.project_slug}}/production.yml @@ -3,7 +3,7 @@ version: '3' volumes: production_postgres_data: {} production_postgres_data_backups: {} - production_caddy: {} + production_traefik: {} services: django:{% if cookiecutter.use_celery == 'y' %} &django{% endif %} @@ -30,17 +30,15 @@ services: env_file: - ./.envs/.production/.postgres - caddy: + traefik: build: context: . - dockerfile: ./compose/production/caddy/Dockerfile - image: {{ cookiecutter.project_slug }}_production_caddy + dockerfile: ./compose/production/traefik/Dockerfile + image: {{ cookiecutter.project_slug }}_production_traefik depends_on: - django volumes: - - production_caddy:/root/.caddy - env_file: - - ./.envs/.production/.caddy + - production_traefik:/etc/traefik/acme ports: - "0.0.0.0:80:80" - "0.0.0.0:443:443" @@ -67,3 +65,11 @@ services: command: /start-flower {%- endif %} + awscli: + build: + context: . + dockerfile: ./compose/production/aws/Dockerfile + env_file: + - ./.envs/.production/.django + volumes: + - production_postgres_data_backups:/backups diff --git a/{{cookiecutter.project_slug}}/requirements/base.txt b/{{cookiecutter.project_slug}}/requirements/base.txt index 6f94268b3..9d4aec7dc 100644 --- a/{{cookiecutter.project_slug}}/requirements/base.txt +++ b/{{cookiecutter.project_slug}}/requirements/base.txt @@ -1,27 +1,27 @@ pytz==2018.9 # https://github.com/stub42/pytz -python-slugify==2.0.1 # https://github.com/un33k/python-slugify -Pillow==5.4.1 # https://github.com/python-pillow/Pillow +python-slugify==3.0.2 # https://github.com/un33k/python-slugify +Pillow==6.0.0 # https://github.com/python-pillow/Pillow {%- if cookiecutter.use_compressor == "y" %} -rcssmin==1.0.6{% if cookiecutter.windows == 'y' %} --install-option="--without-c-extensions"{% endif %} # https://github.com/ndparker/rcssmin +rcssmin==1.0.6{% if cookiecutter.windows == 'y' and cookiecutter.use_docker == 'n' %} --install-option="--without-c-extensions"{% endif %} # https://github.com/ndparker/rcssmin {%- endif %} argon2-cffi==19.1.0 # https://github.com/hynek/argon2_cffi {%- if cookiecutter.use_whitenoise == 'y' %} whitenoise==4.1.2 # https://github.com/evansd/whitenoise {%- endif %} -redis>=2.10.6, < 3 # pyup: < 3 # https://github.com/antirez/redis +redis==3.2.1 # https://github.com/antirez/redis {%- if cookiecutter.use_celery == "y" %} -celery==4.2.1 # pyup: < 5.0 # https://github.com/celery/celery +celery==4.3.0 # pyup: < 5.0 # https://github.com/celery/celery {%- if cookiecutter.use_docker == 'y' %} -flower==0.9.2 # https://github.com/mher/flower +flower==0.9.3 # https://github.com/mher/flower {%- endif %} {%- endif %} # Django # ------------------------------------------------------------------------------ -django==2.0.10 # pyup: < 2.1 # https://www.djangoproject.com/ +django==2.0.13 # pyup: < 2.1 # https://www.djangoproject.com/ django-environ==0.4.5 # https://github.com/joke2k/django-environ django-model-utils==3.1.2 # https://github.com/jazzband/django-model-utils -django-allauth==0.38.0 # https://github.com/pennersr/django-allauth +django-allauth==0.39.1 # https://github.com/pennersr/django-allauth django-crispy-forms==1.7.2 # https://github.com/django-crispy-forms/django-crispy-forms {%- if cookiecutter.use_compressor == "y" %} 
django-compressor==2.2 # https://github.com/django-compressor/django-compressor @@ -29,5 +29,5 @@ django-compressor==2.2 # https://github.com/django-compressor/django-compressor django-redis==4.10.0 # https://github.com/niwinz/django-redis # Django REST Framework -djangorestframework==3.9.0 # https://github.com/encode/django-rest-framework +djangorestframework==3.9.2 # https://github.com/encode/django-rest-framework coreapi==2.3.3 # https://github.com/core-api/python-client diff --git a/{{cookiecutter.project_slug}}/requirements/local.txt b/{{cookiecutter.project_slug}}/requirements/local.txt index 7ee9a1ee3..eb81e7d02 100644 --- a/{{cookiecutter.project_slug}}/requirements/local.txt +++ b/{{cookiecutter.project_slug}}/requirements/local.txt @@ -1,30 +1,35 @@ -r ./base.txt -Werkzeug==0.14.1 # https://github.com/pallets/werkzeug -ipdb==0.11 # https://github.com/gotcha/ipdb -Sphinx==1.8.3 # https://github.com/sphinx-doc/sphinx +Werkzeug==0.15.2 # https://github.com/pallets/werkzeug +ipdb==0.12 # https://github.com/gotcha/ipdb +Sphinx==2.0.0 # https://github.com/sphinx-doc/sphinx {%- if cookiecutter.use_docker == 'y' %} -psycopg2==2.7.4 --no-binary psycopg2 # https://github.com/psycopg/psycopg2 +psycopg2==2.8 --no-binary psycopg2 # https://github.com/psycopg/psycopg2 {%- else %} -psycopg2-binary==2.7.7 # https://github.com/psycopg/psycopg2 +psycopg2-binary==2.8 # https://github.com/psycopg/psycopg2 {%- endif %} # Testing # ------------------------------------------------------------------------------ -mypy==0.660 # https://github.com/python/mypy -pytest==4.1.1 # https://github.com/pytest-dev/pytest +mypy==0.700 # https://github.com/python/mypy +pytest==4.4.0 # https://github.com/pytest-dev/pytest pytest-sugar==0.9.2 # https://github.com/Frozenball/pytest-sugar # Code quality # ------------------------------------------------------------------------------ -flake8==3.6.0 # https://github.com/PyCQA/flake8 -coverage==4.5.2 # https://github.com/nedbat/coveragepy +flake8==3.7.5 # https://github.com/PyCQA/flake8 +coverage==4.5.3 # https://github.com/nedbat/coveragepy +black==19.3b0 # https://github.com/ambv/black +pylint-django==2.0.6 # https://github.com/PyCQA/pylint-django +{%- if cookiecutter.use_celery == 'y' %} +pylint-celery==0.3 # https://github.com/PyCQA/pylint-celery +{%- endif %} # Django # ------------------------------------------------------------------------------ factory-boy==2.11.1 # https://github.com/FactoryBoy/factory_boy django-debug-toolbar==1.11 # https://github.com/jazzband/django-debug-toolbar -django-extensions==2.1.4 # https://github.com/django-extensions/django-extensions +django-extensions==2.1.6 # https://github.com/django-extensions/django-extensions django-coverage-plugin==1.6.0 # https://github.com/nedbat/django_coverage_plugin -pytest-django==3.4.5 # https://github.com/pytest-dev/pytest-django +pytest-django==3.4.8 # https://github.com/pytest-dev/pytest-django diff --git a/{{cookiecutter.project_slug}}/requirements/production.txt b/{{cookiecutter.project_slug}}/requirements/production.txt index 34877e3fd..d9fa2550d 100644 --- a/{{cookiecutter.project_slug}}/requirements/production.txt +++ b/{{cookiecutter.project_slug}}/requirements/production.txt @@ -3,15 +3,19 @@ -r ./base.txt gunicorn==19.9.0 # https://github.com/benoitc/gunicorn -psycopg2==2.7.4 --no-binary psycopg2 # https://github.com/psycopg/psycopg2 +psycopg2==2.8 --no-binary psycopg2 # https://github.com/psycopg/psycopg2 {%- if cookiecutter.use_whitenoise == 'n' %} Collectfast==0.6.2 # 
https://github.com/antonagestam/collectfast {%- endif %} {%- if cookiecutter.use_sentry == "y" %} -raven==6.10.0 # https://github.com/getsentry/raven-python +sentry-sdk==0.7.10 # https://github.com/getsentry/sentry-python {%- endif %} # Django # ------------------------------------------------------------------------------ +{%- if cookiecutter.cloud_provider == 'AWS' %} django-storages[boto3]==1.7.1 # https://github.com/jschneier/django-storages -django-anymail[mailgun]==5.0 # https://github.com/anymail/django-anymail \ No newline at end of file +{%- elif cookiecutter.cloud_provider == 'GCE' %} +django-storages[google]==1.7.1 # https://github.com/jschneier/django-storages +{%- endif %} +django-anymail[mailgun]==6.0 # https://github.com/anymail/django-anymail diff --git a/{{cookiecutter.project_slug}}/runtime.txt b/{{cookiecutter.project_slug}}/runtime.txt index 1935e9778..9fbd3bf0a 100644 --- a/{{cookiecutter.project_slug}}/runtime.txt +++ b/{{cookiecutter.project_slug}}/runtime.txt @@ -1 +1 @@ -python-3.6.6 +python-3.6.8 diff --git a/{{cookiecutter.project_slug}}/utility/requirements-bionic.apt b/{{cookiecutter.project_slug}}/utility/requirements-bionic.apt new file mode 100644 index 000000000..1ca82b264 --- /dev/null +++ b/{{cookiecutter.project_slug}}/utility/requirements-bionic.apt @@ -0,0 +1,23 @@ +## Basic build dependencies of various Django apps for Ubuntu Bionic 18.04 +# The build-essential metapackage installs make, gcc and g++ +build-essential +# Required for translation support +gettext +python3-dev + +## Shared dependencies of: +## Pillow, pylibmc +zlib1g-dev + +## PostgreSQL and psycopg2 dependencies +libpq-dev + +## Pillow dependencies +libtiff5-dev +libjpeg8-dev +libfreetype6-dev +liblcms2-dev +libwebp-dev + +## django-extensions +libgraphviz-dev diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/taskapp/celery.py b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/taskapp/celery.py index 570abc125..529da1ae0 100644 --- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/taskapp/celery.py +++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/taskapp/celery.py @@ -1,4 +1,4 @@ -{% if cookiecutter.use_celery == 'y' %} +{% if cookiecutter.use_celery == 'y' -%} import os from celery import Celery from django.apps import apps, AppConfig @@ -7,51 +7,31 @@ from django.conf import settings if not settings.configured: # set the default Django settings module for the 'celery' program. - os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local') # pragma: no cover + os.environ.setdefault( + "DJANGO_SETTINGS_MODULE", "config.settings.local" + ) # pragma: no cover -app = Celery('{{cookiecutter.project_slug}}') +app = Celery("{{cookiecutter.project_slug}}") # Using a string here means the worker will not have to # pickle the object when using Windows. # - namespace='CELERY' means all celery-related configuration keys # should have a `CELERY_` prefix.
diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/taskapp/celery.py b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/taskapp/celery.py
index 570abc125..529da1ae0 100644
--- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/taskapp/celery.py
+++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/taskapp/celery.py
@@ -1,4 +1,4 @@
-{% if cookiecutter.use_celery == 'y' %}
+{% if cookiecutter.use_celery == 'y' -%}
 import os
 from celery import Celery
 from django.apps import apps, AppConfig
@@ -7,51 +7,31 @@ from django.conf import settings
 
 if not settings.configured:
     # set the default Django settings module for the 'celery' program.
-    os.environ.setdefault('DJANGO_SETTINGS_MODULE', 'config.settings.local')  # pragma: no cover
+    os.environ.setdefault(
+        "DJANGO_SETTINGS_MODULE", "config.settings.local"
+    )  # pragma: no cover
 
-app = Celery('{{cookiecutter.project_slug}}')
+app = Celery("{{cookiecutter.project_slug}}")
 
 # Using a string here means the worker will not have to
 # pickle the object when using Windows.
 # - namespace='CELERY' means all celery-related configuration keys
 #   should have a `CELERY_` prefix.
-app.config_from_object('django.conf:settings', namespace='CELERY')
+app.config_from_object("django.conf:settings", namespace="CELERY")
 
 
 class CeleryAppConfig(AppConfig):
-    name = '{{cookiecutter.project_slug}}.taskapp'
-    verbose_name = 'Celery Config'
+    name = "{{cookiecutter.project_slug}}.taskapp"
+    verbose_name = "Celery Config"
 
     def ready(self):
         installed_apps = [app_config.name for app_config in apps.get_app_configs()]
         app.autodiscover_tasks(lambda: installed_apps, force=True)
-        {% if cookiecutter.use_sentry == 'y' -%}
-        if hasattr(settings, 'RAVEN_CONFIG'):
-            # Celery signal registration
-{% if cookiecutter.use_pycharm == 'y' -%}
-            # Since raven is required in production only,
-            # imports might (most surely will) be wiped out
-            # during PyCharm code clean up started
-            # in other environments.
-            # @formatter:off
-{%- endif %}
-            from raven import Client as RavenClient
-            from raven.contrib.celery import register_signal as raven_register_signal
-            from raven.contrib.celery import register_logger_signal as raven_register_logger_signal
-{% if cookiecutter.use_pycharm == 'y' -%}
-            # @formatter:on
-{%- endif %}
-
-            raven_client = RavenClient(dsn=settings.RAVEN_CONFIG['dsn'])
-            raven_register_logger_signal(raven_client)
-            raven_register_signal(raven_client)
-        {%- endif %}
-
 
 @app.task(bind=True)
 def debug_task(self):
-    print(f'Request: {self.request!r}')  # pragma: no cover
+    print(f"Request: {self.request!r}")  # pragma: no cover
 
 {% else %}
 # Use this as a starting point for your project with celery.
 # If you are not using celery, you can remove this app
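Because config_from_object() is read with namespace="CELERY", only Django settings carrying a CELERY_ prefix reach the worker, and CeleryAppConfig.ready() autodiscovers tasks from every installed app. A minimal sketch of how that plays out in a generated project (the Redis URL, app name, and task below are illustrative placeholders, not part of the template):

# config/settings/base.py -- the CELERY_ prefix is stripped, so these become
# Celery's broker_url / result_backend options.
CELERY_BROKER_URL = "redis://localhost:6379/0"
CELERY_RESULT_BACKEND = "redis://localhost:6379/0"

# some_app/tasks.py -- any module named tasks.py inside an installed app is
# picked up by app.autodiscover_tasks() when CeleryAppConfig.ready() runs.
from celery import shared_task


@shared_task
def send_welcome_email(user_id):
    """Illustrative task; decorating it registers it with the shared Celery app."""
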
diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/templates/base.html b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/templates/base.html
index be99a0c31..7395f4a6c 100644
--- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/templates/base.html
+++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/templates/base.html
@@ -17,8 +17,8 @@
     {% block css %}
     {% endraw %}{% if cookiecutter.custom_bootstrap_compilation == "n" %}{% raw %}
-
-
+
+
     {% endraw %}{% endif %}{% raw %}
@@ -104,10 +104,10 @@
     {% endraw %}{% if cookiecutter.use_compressor == "y" %}{% raw %}{% endcompress %}{% endraw %}{% endif %}{% raw %}
     {% endraw %}{% else %}{% raw %}
-
+
-
+
     {% endraw %}{% endif %}{% raw %}
diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/adapters.py b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/adapters.py
index 9361d6eca..0d206fae4 100644
--- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/adapters.py
+++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/adapters.py
@@ -7,12 +7,10 @@ from django.http import HttpRequest
 
 
 class AccountAdapter(DefaultAccountAdapter):
-
     def is_open_for_signup(self, request: HttpRequest):
         return getattr(settings, "ACCOUNT_ALLOW_REGISTRATION", True)
 
 
 class SocialAccountAdapter(DefaultSocialAccountAdapter):
-
     def is_open_for_signup(self, request: HttpRequest, sociallogin: Any):
         return getattr(settings, "ACCOUNT_ALLOW_REGISTRATION", True)
diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/forms.py b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/forms.py
index 7bba81ff3..250cc9040 100644
--- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/forms.py
+++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/forms.py
@@ -6,7 +6,6 @@ User = get_user_model()
 
 
 class UserChangeForm(forms.UserChangeForm):
-
     class Meta(forms.UserChangeForm.Meta):
         model = User
 
diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/factories.py b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/factories.py
index 009905768..b53713661 100644
--- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/factories.py
+++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/factories.py
@@ -19,9 +19,7 @@ class UserFactory(DjangoModelFactory):
             digits=True,
             upper_case=True,
             lower_case=True,
-        ).generate(
-            extra_kwargs={}
-        )
+        ).generate(extra_kwargs={})
         self.set_password(password)
 
     class Meta:
diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_forms.py b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_forms.py
index e80661648..dfa5da52e 100644
--- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_forms.py
+++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_forms.py
@@ -7,7 +7,6 @@ pytestmark = pytest.mark.django_db
 
 
 class TestUserCreationForm:
-
     def test_clean_username(self):
         # A user with proto_user params does not exist yet.
         proto_user = UserFactory.build()
diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_views.py b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_views.py
index 0992e4626..76cb80aa8 100644
--- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_views.py
+++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_views.py
@@ -40,7 +40,6 @@ class TestUserUpdateView:
 
 
 class TestUserRedirectView:
-
     def test_get_redirect_url(
         self, user: settings.AUTH_USER_MODEL, request_factory: RequestFactory
     ):
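The test methods above receive user and request_factory as pytest fixtures rather than building them inline. A hedged sketch of the kind of conftest.py that supplies them (assumed for illustration; not quoted from this changeset):

# conftest.py -- illustrative only; fixture names match the test signatures above.
import pytest
from django.conf import settings
from django.test import RequestFactory

from {{cookiecutter.project_slug}}.users.tests.factories import UserFactory


@pytest.fixture
def user() -> settings.AUTH_USER_MODEL:
    # A fresh user per test, built via the UserFactory shown earlier in this diff.
    return UserFactory()


@pytest.fixture
def request_factory() -> RequestFactory:
    # Bare Django RequestFactory for view-level tests that bypass URL routing.
    return RequestFactory()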