diff --git a/README.md b/README.md
index a9bf1990f..b73ecef9e 100644
--- a/README.md
+++ b/README.md
@@ -6,7 +6,7 @@
[](https://github.com/ambv/black)
[](https://pyup.io/repos/github/cookiecutter/cookiecutter-django/)
-[](https://discord.gg/uFXweDQc5a)
+[](https://discord.gg/rAWFUP47d2)
[](https://www.codetriage.com/cookiecutter/cookiecutter-django)
Powered by [Cookiecutter](https://github.com/cookiecutter/cookiecutter), Cookiecutter Django is a framework for jumpstarting
@@ -20,7 +20,7 @@ production-ready Django projects quickly.
## Features
- For Django 4.2
-- Works with Python 3.11
+- Works with Python 3.12
- Renders Django projects with 100% starting test coverage
- Twitter [Bootstrap](https://github.com/twbs/bootstrap) v5
- [12-Factor](https://12factor.net) based settings via [django-environ](https://github.com/joke2k/django-environ)
@@ -51,7 +51,7 @@ _These features can be enabled during initial project setup._
## Constraints
- Only maintained 3rd party libraries are used.
-- Uses PostgreSQL everywhere: 10 - 15 ([MySQL fork](https://github.com/mabdullahadeel/cookiecutter-django-mysql) also available).
+- Uses PostgreSQL everywhere: 12 - 16 ([MySQL fork](https://github.com/mabdullahadeel/cookiecutter-django-mysql) also available).
- Environment variables for configuration (This won't work with Apache/mod_wsgi).
## Support this Project!
@@ -65,19 +65,15 @@ This project is an open source project run by volunteers. You can sponsor us via
Projects that provide financial support to the maintainers:
----
+### Two Scoops of Django
-
-
-
+[](https://www.feldroy.com/two-scoops-press#two-scoops-of-django)
Two Scoops of Django 3.x is the best ice cream-themed Django reference in the universe!
### PyUp
-
-
-
+[](https://pyup.io)
PyUp brings you automated security and dependency updates used by Google and other organizations. Free for open source projects!
@@ -133,12 +129,11 @@ Answer the prompts with your own desired [options](http://cookiecutter-django.re
Choose from 1, 2, 3 [1]: 1
use_docker [n]: n
Select postgresql_version:
- 1 - 15
- 2 - 14
- 3 - 13
- 4 - 12
- 5 - 11
- 6 - 10
+ 1 - 16
+ 2 - 15
+ 3 - 14
+ 4 - 13
+ 5 - 12
Choose from 1, 2, 3, 4, 5 [1]: 1
Select cloud_provider:
1 - AWS
@@ -152,7 +147,7 @@ Answer the prompts with your own desired [options](http://cookiecutter-django.re
4 - Mandrill
5 - Postmark
6 - Sendgrid
- 7 - SendinBlue
+ 7 - Brevo (formerly SendinBlue)
8 - SparkPost
9 - Other SMTP
Choose from 1, 2, 3, 4, 5, 6, 7, 8, 9 [1]: 1
diff --git a/cookiecutter.json b/cookiecutter.json
index e343617d1..86e44f368 100644
--- a/cookiecutter.json
+++ b/cookiecutter.json
@@ -18,7 +18,7 @@
"windows": "n",
"editor": ["None", "PyCharm", "VS Code"],
"use_docker": "n",
- "postgresql_version": ["15", "14", "13", "12", "11", "10"],
+ "postgresql_version": ["16", "15", "14", "13", "12"],
"cloud_provider": ["AWS", "GCP", "Azure", "None"],
"mail_service": [
"Mailgun",
@@ -27,7 +27,7 @@
"Mandrill",
"Postmark",
"Sendgrid",
- "SendinBlue",
+ "Brevo",
"SparkPost",
"Other SMTP"
],
diff --git a/docs/deployment-on-heroku.rst b/docs/deployment-on-heroku.rst
index 71c6e11b2..fdd953e09 100644
--- a/docs/deployment-on-heroku.rst
+++ b/docs/deployment-on-heroku.rst
@@ -46,7 +46,7 @@ Run these commands to deploy the project to Heroku:
# Assign with AWS_STORAGE_BUCKET_NAME
heroku config:set DJANGO_AWS_STORAGE_BUCKET_NAME=
- git push heroku master
+ git push heroku main
heroku run python manage.py createsuperuser
diff --git a/docs/deployment-on-pythonanywhere.rst b/docs/deployment-on-pythonanywhere.rst
index 2fa6a960c..726f325e2 100644
--- a/docs/deployment-on-pythonanywhere.rst
+++ b/docs/deployment-on-pythonanywhere.rst
@@ -37,7 +37,7 @@ Make sure your project is fully committed and pushed up to Bitbucket or Github o
mkvirtualenv --python=/usr/bin/python3.10 my-project-name
pip install -r requirements/production.txt # may take a few minutes
-.. note:: We're creating the virtualenv using Python 3.10 (``--python=/usr/bin/python3.10```), although Cookiecutter Django generates a project for Python 3.11. This is because, at time of writing, PythonAnywhere only supports Python 3.10. It shouldn't be a problem, but if is, you may try changing the Python version to 3.11 and see if it works. If it does, please let us know, or even better, submit a pull request to update this section.
+.. note:: We're creating the virtualenv using Python 3.10 (``--python=/usr/bin/python3.10``), although Cookiecutter Django generates a project for Python 3.12. This is because, at time of writing, PythonAnywhere only supports Python 3.10. It shouldn't be a problem, but if it is, you may try changing the Python version to 3.12 and see if it works. If it does, please let us know, or even better, submit a pull request to update this section.
Setting environment variables in the console
--------------------------------------------
diff --git a/docs/deployment-with-docker.rst b/docs/deployment-with-docker.rst
index 3d2f9f813..ebc42a52d 100644
--- a/docs/deployment-with-docker.rst
+++ b/docs/deployment-with-docker.rst
@@ -14,7 +14,7 @@ Prerequisites
Understanding the Docker Compose Setup
--------------------------------------
-Before you begin, check out the ``production.yml`` file in the root of this project. Keep note of how it provides configuration for the following services:
+Before you begin, check out the ``docker-compose.production.yml`` file in the root of this project. Keep note of how it provides configuration for the following services:
* ``django``: your application running behind ``Gunicorn``;
* ``postgres``: PostgreSQL database with the application's relational data;
@@ -107,7 +107,7 @@ To solve this, you can either:
2. create a ``.env`` file in the root of the project with just variables you need. You'll need to also define them in ``.envs/.production/.django`` (hence duplicating them).
3. set these variables when running the build command::
- DJANGO_AWS_S3_CUSTOM_DOMAIN=example.com docker compose -f production.yml build``.
+ DJANGO_AWS_S3_CUSTOM_DOMAIN=example.com docker compose -f docker-compose.production.yml build``.
None of these options are ideal, we're open to suggestions on how to improve this. If you think you have one, please open an issue or a pull request.
@@ -122,42 +122,42 @@ Building & Running Production Stack
You will need to build the stack first. To do that, run::
- docker compose -f production.yml build
+ docker compose -f docker-compose.production.yml build
Once this is ready, you can run it with::
- docker compose -f production.yml up
+ docker compose -f docker-compose.production.yml up
To run the stack and detach the containers, run::
- docker compose -f production.yml up -d
+ docker compose -f docker-compose.production.yml up -d
To run a migration, open up a second terminal and run::
- docker compose -f production.yml run --rm django python manage.py migrate
+ docker compose -f docker-compose.production.yml run --rm django python manage.py migrate
To create a superuser, run::
- docker compose -f production.yml run --rm django python manage.py createsuperuser
+ docker compose -f docker-compose.production.yml run --rm django python manage.py createsuperuser
If you need a shell, run::
- docker compose -f production.yml run --rm django python manage.py shell
+ docker compose -f docker-compose.production.yml run --rm django python manage.py shell
To check the logs out, run::
- docker compose -f production.yml logs
+ docker compose -f docker-compose.production.yml logs
If you want to scale your application, run::
- docker compose -f production.yml up --scale django=4
- docker compose -f production.yml up --scale celeryworker=2
+ docker compose -f docker-compose.production.yml up --scale django=4
+ docker compose -f docker-compose.production.yml up --scale celeryworker=2
.. warning:: don't try to scale ``postgres``, ``celerybeat``, or ``traefik``.
To see how your containers are doing run::
- docker compose -f production.yml ps
+ docker compose -f docker-compose.production.yml ps
Example: Supervisor
@@ -165,12 +165,12 @@ Example: Supervisor
Once you are ready with your initial setup, you want to make sure that your application is run by a process manager to
survive reboots and auto restarts in case of an error. You can use the process manager you are most familiar with. All
-it needs to do is to run ``docker compose -f production.yml up`` in your projects root directory.
+it needs to do is to run ``docker compose -f docker-compose.production.yml up`` in your projects root directory.
If you are using ``supervisor``, you can use this file as a starting point::
[program:{{cookiecutter.project_slug}}]
- command=docker compose -f production.yml up
+ command=docker compose -f docker-compose.production.yml up
directory=/path/to/{{cookiecutter.project_slug}}
redirect_stderr=true
autostart=true
diff --git a/docs/developing-locally-docker.rst b/docs/developing-locally-docker.rst
index a8f945adf..83de99bb9 100644
--- a/docs/developing-locally-docker.rst
+++ b/docs/developing-locally-docker.rst
@@ -32,9 +32,9 @@ Build the Stack
This can take a while, especially the first time you run this particular command on your development system::
- $ docker compose -f local.yml build
+ $ docker compose -f docker-compose.local.yml build
-Generally, if you want to emulate production environment use ``production.yml`` instead. And this is true for any other actions you might need to perform: whenever a switch is required, just do it!
+Generally, if you want to emulate production environment use ``docker-compose.production.yml`` instead. And this is true for any other actions you might need to perform: whenever a switch is required, just do it!
Before doing any git commit, `pre-commit`_ should be installed globally on your local machine, and then::
@@ -51,11 +51,11 @@ This brings up both Django and PostgreSQL. The first time it is run it might tak
Open a terminal at the project root and run the following for local development::
- $ docker compose -f local.yml up
+ $ docker compose -f docker-compose.local.yml up
-You can also set the environment variable ``COMPOSE_FILE`` pointing to ``local.yml`` like this::
+You can also set the environment variable ``COMPOSE_FILE`` pointing to ``docker-compose.local.yml`` like this::
- $ export COMPOSE_FILE=local.yml
+ $ export COMPOSE_FILE=docker-compose.local.yml
And then run::
@@ -65,16 +65,23 @@ To run in a detached (background) mode, just::
$ docker compose up -d
+These commands don't run the docs service. In order to run docs service you can run::
+
+ $ docker compose -f docker-compose.docs.yml up
+
+To run the docs with local services just use::
+
+ $ docker compose -f docker-compose.local.yml -f docker-compose.docs.yml up
The site should start and be accessible at http://localhost:3000 if you selected Webpack or Gulp as frontend pipeline and http://localhost:8000 otherwise.
Execute Management Commands
---------------------------
-As with any shell command that we wish to run in our container, this is done using the ``docker compose -f local.yml run --rm`` command: ::
+As with any shell command that we wish to run in our container, this is done using the ``docker compose -f docker-compose.local.yml run --rm`` command: ::
- $ docker compose -f local.yml run --rm django python manage.py migrate
- $ docker compose -f local.yml run --rm django python manage.py createsuperuser
+ $ docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
+ $ docker compose -f docker-compose.local.yml run --rm django python manage.py createsuperuser
Here, ``django`` is the target service we are executing the commands against.
Also, please note that the ``docker exec`` does not work for running management commands.
@@ -90,7 +97,7 @@ When ``DEBUG`` is set to ``True``, the host is validated against ``['localhost',
Configuring the Environment
---------------------------
-This is the excerpt from your project's ``local.yml``: ::
+This is the excerpt from your project's ``docker-compose.local.yml``: ::
# ...
@@ -156,8 +163,8 @@ You have to modify the relevant requirement file: base, local or production by a
To get this change picked up, you'll need to rebuild the image(s) and restart the running container: ::
- docker compose -f local.yml build
- docker compose -f local.yml up
+ docker compose -f docker-compose.local.yml build
+ docker compose -f docker-compose.local.yml up
Debugging
~~~~~~~~~
@@ -171,7 +178,7 @@ If you are using the following within your code to debug: ::
Then you may need to run the following for it to work as desired: ::
- $ docker compose -f local.yml run --rm --service-ports django
+ $ docker compose -f docker-compose.local.yml run --rm --service-ports django
django-debug-toolbar
@@ -224,7 +231,7 @@ Prerequisites:
* ``use_docker`` was set to ``y`` on project initialization;
* ``use_celery`` was set to ``y`` on project initialization.
-By default, it's enabled both in local and production environments (``local.yml`` and ``production.yml`` Docker Compose configs, respectively) through a ``flower`` service. For added security, ``flower`` requires its clients to provide authentication credentials specified as the corresponding environments' ``.envs/.local/.django`` and ``.envs/.production/.django`` ``CELERY_FLOWER_USER`` and ``CELERY_FLOWER_PASSWORD`` environment variables. Check out ``localhost:5555`` and see for yourself.
+By default, it's enabled both in local and production environments (``docker-compose.local.yml`` and ``docker-compose.production.yml`` Docker Compose configs, respectively) through a ``flower`` service. For added security, ``flower`` requires its clients to provide authentication credentials specified as the corresponding environments' ``.envs/.local/.django`` and ``.envs/.production/.django`` ``CELERY_FLOWER_USER`` and ``CELERY_FLOWER_PASSWORD`` environment variables. Check out ``localhost:5555`` and see for yourself.
.. _`Flower`: https://github.com/mher/flower
@@ -272,7 +279,7 @@ certs
Take the certificates that you generated and place them in a folder called ``certs`` in the project's root folder. Assuming that you registered your local hostname as ``my-dev-env.local``, the certificates you will put in the folder should have the names ``my-dev-env.local.crt`` and ``my-dev-env.local.key``.
-local.yml
+docker-compose.local.yml
-~~~~~~~~~
+~~~~~~~~~~~~~~~~~~~~~~~~
#. Add the ``nginx-proxy`` service. ::
@@ -316,7 +323,7 @@ You should allow the new hostname. ::
Rebuild your ``docker`` application. ::
- $ docker compose -f local.yml up -d --build
+ $ docker compose -f docker-compose.local.yml up -d --build
Go to your browser and type in your URL bar ``https://my-dev-env.local``
@@ -336,9 +343,9 @@ Webpack
If you are using Webpack:
-1. On the ``nginx-proxy`` service in ``local.yml``, change ``depends_on`` to ``node`` instead of ``django``.
+1. On the ``nginx-proxy`` service in ``docker-compose.local.yml``, change ``depends_on`` to ``node`` instead of ``django``.
-2. On the ``node`` service in ``local.yml``, add the following environment configuration:
+2. On the ``node`` service in ``docker-compose.local.yml``, add the following environment configuration:
::
diff --git a/docs/developing-locally.rst b/docs/developing-locally.rst
index 92379f4fd..16247d082 100644
--- a/docs/developing-locally.rst
+++ b/docs/developing-locally.rst
@@ -9,7 +9,7 @@ Setting Up Development Environment
Make sure to have the following on your host:
-* Python 3.11
+* Python 3.12
* PostgreSQL_.
* Redis_, if using Celery
* Cookiecutter_
@@ -18,7 +18,7 @@ First things first.
#. Create a virtualenv: ::
- $ python3.11 -m venv
+    $ python3.12 -m venv <virtual env path>
#. Activate the virtualenv you have just created: ::
@@ -96,6 +96,61 @@ First things first.
.. _direnv: https://direnv.net/
+Creating Your First Django App
+-------------------------------
+
+After setting up your environment, you're ready to add your first app. This project uses the setup from "Two Scoops of Django" with a two-tier layout:
+
+- **Top Level Repository Root** has config files, documentation, `manage.py`, and more.
+- **Second Level Django Project Root** is where your Django apps live.
+- **Second Level Configuration Root** holds settings and URL configurations.
+
+The project layout looks something like this: ::
+
+ /
+ ├── config/
+ │ ├── settings/
+ │ │ ├── __init__.py
+ │ │ ├── base.py
+ │ │ ├── local.py
+ │ │ └── production.py
+ │ ├── urls.py
+ │ └── wsgi.py
+ ├── /
+ │ ├── /
+ │ │ ├── migrations/
+ │ │ ├── admin.py
+ │ │ ├── apps.py
+ │ │ ├── models.py
+ │ │ ├── tests.py
+ │ │ └── views.py
+ │ ├── __init__.py
+ │ └── ...
+ ├── requirements/
+ │ ├── base.txt
+ │ ├── local.txt
+ │ └── production.txt
+ ├── manage.py
+ ├── README.md
+ └── ...
+
+
+Following this structured approach, here's how to add a new app:
+
+#. **Create the app** using Django's ``startapp`` command, replacing ``<app_name>`` with your desired app name: ::
+
+    $ python manage.py startapp <app_name>
+
+#. **Move the app** to the Django Project Root, maintaining the project's two-tier structure: ::
+
+    $ mv <app_name> <project_slug>/
+
+#. **Edit the app's apps.py** by changing ``name = '<app_name>'`` to ``name = '<project_slug>.<app_name>'``.
+
+#. **Register the new app** by adding it to the ``LOCAL_APPS`` list in ``config/settings/base.py``, integrating it as an official component of your project.
+
+
+
Setup Email Backend
-------------------
diff --git a/docs/docker-postgres-backups.rst b/docs/docker-postgres-backups.rst
index fdf446030..d214ee4e8 100644
--- a/docs/docker-postgres-backups.rst
+++ b/docs/docker-postgres-backups.rst
@@ -1,14 +1,14 @@
PostgreSQL Backups with Docker
==============================
-.. note:: For brevity it is assumed that you will be running the below commands against local environment, however, this is by no means mandatory so feel free to switch to ``production.yml`` when needed.
+.. note:: For brevity it is assumed that you will be running the below commands against local environment, however, this is by no means mandatory so feel free to switch to ``docker-compose.production.yml`` when needed.
Prerequisites
-------------
#. the project was generated with ``use_docker`` set to ``y``;
-#. the stack is up and running: ``docker compose -f local.yml up -d postgres``.
+#. the stack is up and running: ``docker compose -f docker-compose.local.yml up -d postgres``.
Creating a Backup
@@ -16,7 +16,7 @@ Creating a Backup
To create a backup, run::
- $ docker compose -f local.yml exec postgres backup
+ $ docker compose -f docker-compose.local.yml exec postgres backup
Assuming your project's database is named ``my_project`` here is what you will see: ::
@@ -31,7 +31,7 @@ Viewing the Existing Backups
To list existing backups, ::
- $ docker compose -f local.yml exec postgres backups
+ $ docker compose -f docker-compose.local.yml exec postgres backups
These are the sample contents of ``/backups``: ::
@@ -55,9 +55,9 @@ With a single backup file copied to ``.`` that would be ::
$ docker cp 9c5c3f055843:/backups/backup_2018_03_13T09_05_07.sql.gz .
-You can also get the container ID using ``docker compose -f local.yml ps -q postgres`` so if you want to automate your backups, you don't have to check the container ID manually every time. Here is the full command ::
+You can also get the container ID using ``docker compose -f docker-compose.local.yml ps -q postgres`` so if you want to automate your backups, you don't have to check the container ID manually every time. Here is the full command ::
- $ docker cp $(docker compose -f local.yml ps -q postgres):/backups ./backups
+ $ docker cp $(docker compose -f docker-compose.local.yml ps -q postgres):/backups ./backups
.. _`command`: https://docs.docker.com/engine/reference/commandline/cp/
@@ -66,7 +66,7 @@ Restoring from the Existing Backup
To restore from one of the backups you have already got (take the ``backup_2018_03_13T09_05_07.sql.gz`` for example), ::
- $ docker compose -f local.yml exec postgres restore backup_2018_03_13T09_05_07.sql.gz
+ $ docker compose -f docker-compose.local.yml exec postgres restore backup_2018_03_13T09_05_07.sql.gz
You will see something like ::
@@ -95,12 +95,33 @@ Backup to Amazon S3
For uploading your backups to Amazon S3 you can use the aws cli container. There is an upload command for uploading the postgres /backups directory recursively and there is a download command for downloading a specific backup. The default S3 environment variables are used. ::
- $ docker compose -f production.yml run --rm awscli upload
- $ docker compose -f production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
+ $ docker compose -f docker-compose.production.yml run --rm awscli upload
+ $ docker compose -f docker-compose.production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
Remove Backup
----------------------------------
To remove backup you can use the ``rmbackup`` command. This will remove the backup from the ``/backups`` directory. ::
- $ docker compose -f local.yml exec postgres rmbackup backup_2018_03_13T09_05_07.sql.gz
+ $ docker compose -f docker-compose.local.yml exec postgres rmbackup backup_2018_03_13T09_05_07.sql.gz
+
+
+Upgrading PostgreSQL
+----------------------------------
+
+Upgrading PostgreSQL in your project requires a series of carefully executed steps. Start by halting all containers, excluding the postgres container. Following this, create a backup and proceed to remove the outdated data volume. ::
+
+ $ docker compose -f docker-compose.local.yml down
+ $ docker compose -f docker-compose.local.yml up -d postgres
+ $ docker compose -f docker-compose.local.yml run --rm postgres backup
+ $ docker compose -f docker-compose.local.yml down
+ $ docker volume rm my_project_postgres_data
+
+.. note:: Neglecting to remove the old data volume may lead to issues, such as the new postgres container failing to start with errors like ``FATAL: database files are incompatible with server``, and ``could not translate host name "postgres" to address: Name or service not known``.
+
+To complete the upgrade, update the PostgreSQL version in the corresponding Dockerfile (e.g. ``compose/production/postgres/Dockerfile``) and build a new version of PostgreSQL. ::
+
+ $ docker compose -f docker-compose.local.yml build postgres
+ $ docker compose -f docker-compose.local.yml up -d postgres
+ $ docker compose -f docker-compose.local.yml run --rm postgres restore backup_2018_03_13T09_05_07.sql.gz
+ $ docker compose -f docker-compose.local.yml up -d
diff --git a/docs/document.rst b/docs/document.rst
index 26f5d56a1..61cb692d3 100644
--- a/docs/document.rst
+++ b/docs/document.rst
@@ -11,7 +11,7 @@ After you have set up to `develop locally`_, run the following command from the
If you set up your project to `develop locally with docker`_, run the following command: ::
- $ docker compose -f local.yml up docs
+ $ docker compose -f docker-compose.docs.yml up
Navigate to port 9000 on your host to see the documentation. This will be opened automatically at `localhost`_ for local, non-docker development.
diff --git a/docs/faq.rst b/docs/faq.rst
index 9f0b52a7d..294e6c8e1 100644
--- a/docs/faq.rst
+++ b/docs/faq.rst
@@ -24,4 +24,4 @@ Why doesn't this follow the layout from Two Scoops of Django?
You may notice that some elements of this project do not exactly match what we describe in chapter 3 of `Two Scoops of Django 3.x`_. The reason for that is this project, amongst other things, serves as a test bed for trying out new ideas and concepts. Sometimes they work, sometimes they don't, but the end result is that it won't necessarily match precisely what is described in the book I co-authored.
-.. _Two Scoops of Django 3.x: https://www.feldroy.com/books/two-scoops-of-django-3-x
+.. _Two Scoops of Django 3.x: https://www.feldroy.com/two-scoops-press#two-scoops-of-django
diff --git a/docs/project-generation-options.rst b/docs/project-generation-options.rst
index 967b42426..21059d3a2 100644
--- a/docs/project-generation-options.rst
+++ b/docs/project-generation-options.rst
@@ -66,12 +66,11 @@ use_docker:
postgresql_version:
Select a PostgreSQL_ version to use. The choices are:
- 1. 15
- 2. 14
- 3. 13
- 4. 12
- 5. 11
- 6. 10
+ 1. 16
+ 2. 15
+ 3. 14
+ 4. 13
+ 5. 12
cloud_provider:
Select a cloud provider for static & media files. The choices are:
@@ -92,7 +91,7 @@ mail_service:
4. Mandrill_
5. Postmark_
6. SendGrid_
- 7. SendinBlue_
+ 7. `Brevo (formerly SendinBlue)`_
8. SparkPost_
9. `Other SMTP`_
@@ -175,7 +174,7 @@ debug:
.. _Mandrill: http://mandrill.com
.. _Postmark: https://postmarkapp.com
.. _SendGrid: https://sendgrid.com
-.. _SendinBlue: https://www.sendinblue.com
+.. _Brevo (formerly SendinBlue): https://www.brevo.com
.. _SparkPost: https://www.sparkpost.com
.. _Other SMTP: https://anymail.readthedocs.io/en/stable/
diff --git a/docs/requirements.txt b/docs/requirements.txt
index d002affa7..d4b0420de 100644
--- a/docs/requirements.txt
+++ b/docs/requirements.txt
@@ -1,3 +1,3 @@
-sphinx==7.2.6
+sphinx==7.4.5
sphinx-rtd-theme==2.0.0
-myst-parser==2.0.0
+myst-parser==3.0.1
diff --git a/docs/settings.rst b/docs/settings.rst
index 0880bce95..2be79cbf5 100644
--- a/docs/settings.rst
+++ b/docs/settings.rst
@@ -69,8 +69,8 @@ SENDGRID_API_KEY SENDGRID_API_KEY n/a
SENDGRID_GENERATE_MESSAGE_ID True n/a raises error
SENDGRID_MERGE_FIELD_FORMAT None n/a raises error
SENDGRID_API_URL n/a n/a "https://api.sendgrid.com/v3/"
-SENDINBLUE_API_KEY SENDINBLUE_API_KEY n/a raises error
-SENDINBLUE_API_URL n/a n/a "https://api.sendinblue.com/v3/"
+BREVO_API_KEY BREVO_API_KEY n/a raises error
+BREVO_API_URL n/a n/a "https://api.brevo.com/v3/"
SPARKPOST_API_KEY SPARKPOST_API_KEY n/a raises error
SPARKPOST_API_URL n/a n/a "https://api.sparkpost.com/api/v1"
======================================= =========================== ============================================== ======================================================================
diff --git a/docs/testing.rst b/docs/testing.rst
index d403a30eb..58a05770a 100644
--- a/docs/testing.rst
+++ b/docs/testing.rst
@@ -19,7 +19,7 @@ You will get a readout of the `users` app that has already been set up with test
If you set up your project to `develop locally with docker`_, run the following command: ::
- $ docker compose -f local.yml run --rm django pytest
+ $ docker compose -f docker-compose.local.yml run --rm django pytest
Targeting particular apps for testing in ``docker`` follows a similar pattern as previously shown above.
@@ -36,8 +36,8 @@ Once the tests are complete, in order to see the code coverage, run the followin
If you're running the project locally with Docker, use these commands instead: ::
- $ docker compose -f local.yml run --rm django coverage run -m pytest
- $ docker compose -f local.yml run --rm django coverage report
+ $ docker compose -f docker-compose.local.yml run --rm django coverage run -m pytest
+ $ docker compose -f docker-compose.local.yml run --rm django coverage report
.. note::
diff --git a/docs/troubleshooting.rst b/docs/troubleshooting.rst
index 80bab2e29..847f0a701 100644
--- a/docs/troubleshooting.rst
+++ b/docs/troubleshooting.rst
@@ -30,7 +30,7 @@ If you recreate the project multiple times with the same name, Docker would pres
To fix this, you can either:
-- Clear your project-related Docker cache with ``docker compose -f local.yml down --volumes --rmi all``.
+- Clear your project-related Docker cache with ``docker compose -f docker-compose.local.yml down --volumes --rmi all``.
- Use the Docker volume sub-commands to find volumes (`ls`_) and remove them (`rm`_).
- Use the `prune`_ command to clear system-wide (use with care!).
diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py
index 1ddab0636..9e9af5f2d 100644
--- a/hooks/post_gen_project.py
+++ b/hooks/post_gen_project.py
@@ -78,7 +78,11 @@ def remove_docker_files():
shutil.rmtree(".devcontainer")
shutil.rmtree("compose")
- file_names = ["local.yml", "production.yml", ".dockerignore"]
+ file_names = [
+ "docker-compose.local.yml",
+ "docker-compose.production.yml",
+ ".dockerignore",
+ ]
for file_name in file_names:
os.remove(file_name)
if "{{ cookiecutter.editor }}" == "PyCharm":
diff --git a/hooks/pre_gen_project.py b/hooks/pre_gen_project.py
index 2956b9ab4..e58fd3541 100644
--- a/hooks/pre_gen_project.py
+++ b/hooks/pre_gen_project.py
@@ -39,7 +39,7 @@ if "{{ cookiecutter.use_docker }}".lower() == "n":
if python_major_version == 2:
print(
WARNING + "You're running cookiecutter under Python 2, but the generated "
- "project requires Python 3.11+. Do you want to proceed (y/n)? " + TERMINATOR
+ "project requires Python 3.12+. Do you want to proceed (y/n)? " + TERMINATOR
)
yes_options, no_options = frozenset(["y"]), frozenset(["n"])
while True:
diff --git a/pyproject.toml b/pyproject.toml
index 2a9f00b29..6e68762f7 100644
--- a/pyproject.toml
+++ b/pyproject.toml
@@ -15,7 +15,7 @@ norecursedirs = [
# ==== black ====
[tool.black]
line-length = 119
-target-version = ['py311']
+target-version = ['py312']
# ==== isort ====
diff --git a/requirements.txt b/requirements.txt
index 4fcc7bb8c..000d1cf36 100644
--- a/requirements.txt
+++ b/requirements.txt
@@ -1,26 +1,26 @@
cookiecutter==2.6.0
-sh==2.0.6; sys_platform != "win32"
+sh==2.0.7; sys_platform != "win32"
binaryornot==0.4.4
# Code quality
# ------------------------------------------------------------------------------
-ruff==0.3.0
-django-upgrade==1.16.0
+ruff==0.5.2
+django-upgrade==1.19.0
djlint==1.34.1
-pre-commit==3.6.2
+pre-commit==3.7.1
# Testing
# ------------------------------------------------------------------------------
-tox==4.13.0
-pytest==8.1.0
-pytest-xdist==3.5.0
+tox==4.16.0
+pytest==8.2.2
+pytest-xdist==3.6.1
pytest-cookies==0.7.0
pytest-instafail==0.5.0
pyyaml==6.0.1
# Scripting
# ------------------------------------------------------------------------------
-PyGithub==2.2.0
-gitpython==3.1.42
-jinja2==3.1.3
-requests==2.31.0
+PyGithub==2.3.0
+gitpython==3.1.43
+jinja2==3.1.4
+requests==2.32.3
diff --git a/setup.py b/setup.py
index 6e1ea0727..312da1c83 100644
--- a/setup.py
+++ b/setup.py
@@ -5,7 +5,7 @@ except ImportError:
from distutils.core import setup
# We use calendar versioning
-version = "2024.03.03"
+version = "2024.07.17"
with open("README.md") as readme_file:
long_description = readme_file.read()
@@ -30,7 +30,7 @@ setup(
"License :: OSI Approved :: BSD License",
"Programming Language :: Python",
"Programming Language :: Python :: 3",
- "Programming Language :: Python :: 3.11",
+ "Programming Language :: Python :: 3.12",
"Programming Language :: Python :: Implementation :: CPython",
"Topic :: Software Development",
],
diff --git a/tests/test_cookiecutter_generation.py b/tests/test_cookiecutter_generation.py
index b744a986c..5a0bca383 100755
--- a/tests/test_cookiecutter_generation.py
+++ b/tests/test_cookiecutter_generation.py
@@ -57,12 +57,11 @@ SUPPORTED_COMBINATIONS = [
{"editor": "VS Code"},
{"use_docker": "y"},
{"use_docker": "n"},
+ {"postgresql_version": "16"},
{"postgresql_version": "15"},
{"postgresql_version": "14"},
{"postgresql_version": "13"},
{"postgresql_version": "12"},
- {"postgresql_version": "11"},
- {"postgresql_version": "10"},
{"cloud_provider": "AWS", "use_whitenoise": "y"},
{"cloud_provider": "AWS", "use_whitenoise": "n"},
{"cloud_provider": "GCP", "use_whitenoise": "y"},
@@ -74,7 +73,7 @@ SUPPORTED_COMBINATIONS = [
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mandrill"},
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Postmark"},
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Sendgrid"},
- {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "SendinBlue"},
+ {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Brevo"},
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "SparkPost"},
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Other SMTP"},
# Note: cloud_provider=None AND use_whitenoise=n is not supported
@@ -84,7 +83,7 @@ SUPPORTED_COMBINATIONS = [
{"cloud_provider": "AWS", "mail_service": "Mandrill"},
{"cloud_provider": "AWS", "mail_service": "Postmark"},
{"cloud_provider": "AWS", "mail_service": "Sendgrid"},
- {"cloud_provider": "AWS", "mail_service": "SendinBlue"},
+ {"cloud_provider": "AWS", "mail_service": "Brevo"},
{"cloud_provider": "AWS", "mail_service": "SparkPost"},
{"cloud_provider": "AWS", "mail_service": "Other SMTP"},
{"cloud_provider": "GCP", "mail_service": "Mailgun"},
@@ -92,7 +91,7 @@ SUPPORTED_COMBINATIONS = [
{"cloud_provider": "GCP", "mail_service": "Mandrill"},
{"cloud_provider": "GCP", "mail_service": "Postmark"},
{"cloud_provider": "GCP", "mail_service": "Sendgrid"},
- {"cloud_provider": "GCP", "mail_service": "SendinBlue"},
+ {"cloud_provider": "GCP", "mail_service": "Brevo"},
{"cloud_provider": "GCP", "mail_service": "SparkPost"},
{"cloud_provider": "GCP", "mail_service": "Other SMTP"},
{"cloud_provider": "Azure", "mail_service": "Mailgun"},
@@ -100,7 +99,7 @@ SUPPORTED_COMBINATIONS = [
{"cloud_provider": "Azure", "mail_service": "Mandrill"},
{"cloud_provider": "Azure", "mail_service": "Postmark"},
{"cloud_provider": "Azure", "mail_service": "Sendgrid"},
- {"cloud_provider": "Azure", "mail_service": "SendinBlue"},
+ {"cloud_provider": "Azure", "mail_service": "Brevo"},
{"cloud_provider": "Azure", "mail_service": "SparkPost"},
{"cloud_provider": "Azure", "mail_service": "Other SMTP"},
# Note: cloud_providers GCP, Azure, and None
@@ -248,7 +247,13 @@ def test_djlint_lint_passes(cookies, context_override):
# TODO: remove T002 when fixed https://github.com/Riverside-Healthcare/djLint/issues/687
ignored_rules = "H006,H030,H031,T002"
try:
- sh.djlint("--lint", "--ignore", f"{autofixable_rules},{ignored_rules}", ".", _cwd=str(result.project_path))
+ sh.djlint(
+ "--lint",
+ "--ignore",
+ f"{autofixable_rules},{ignored_rules}",
+ ".",
+ _cwd=str(result.project_path),
+ )
except sh.ErrorReturnCode as e:
pytest.fail(e.stdout.decode())
@@ -269,7 +274,7 @@ def test_djlint_check_passes(cookies, context_override):
["use_docker", "expected_test_script"],
[
("n", "pytest"),
- ("y", "docker compose -f local.yml run django pytest"),
+ ("y", "docker compose -f docker-compose.local.yml run django pytest"),
],
)
def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_script):
@@ -294,7 +299,7 @@ def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_scrip
["use_docker", "expected_test_script"],
[
("n", "pytest"),
- ("y", "docker compose -f local.yml run django pytest"),
+ ("y", "docker compose -f docker-compose.local.yml run django pytest"),
],
)
def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expected_test_script):
@@ -321,7 +326,7 @@ def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expec
["use_docker", "expected_test_script"],
[
("n", "pytest"),
- ("y", "docker compose -f local.yml run django pytest"),
+ ("y", "docker compose -f docker-compose.local.yml run django pytest"),
],
)
def test_github_invokes_linter_and_pytest(cookies, context, use_docker, expected_test_script):
diff --git a/tests/test_docker.sh b/tests/test_docker.sh
index c3cad3b37..473eede04 100755
--- a/tests/test_docker.sh
+++ b/tests/test_docker.sh
@@ -15,22 +15,22 @@ cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y "$@"
cd my_awesome_project
# make sure all images build
-docker compose -f local.yml build
+docker compose -f docker-compose.local.yml build
# run the project's type checks
-docker compose -f local.yml run django mypy my_awesome_project
+docker compose -f docker-compose.local.yml run django mypy my_awesome_project
# run the project's tests
-docker compose -f local.yml run django pytest
+docker compose -f docker-compose.local.yml run django pytest
# return non-zero status code if there are migrations that have not been created
-docker compose -f local.yml run django python manage.py makemigrations --dry-run --check || { echo "ERROR: there were changes in the models, but migration listed above have not been created and are not saved in version control"; exit 1; }
+docker compose -f docker-compose.local.yml run django python manage.py makemigrations --dry-run --check || { echo "ERROR: there were changes in the models, but migrations listed above have not been created and are not saved in version control"; exit 1; }
# Test support for translations
-docker compose -f local.yml run django python manage.py makemessages --all
+docker compose -f docker-compose.local.yml run django python manage.py makemessages --all
# Make sure the check doesn't raise any warnings
-docker compose -f local.yml run \
+docker compose -f docker-compose.local.yml run \
-e DJANGO_SECRET_KEY="$(openssl rand -base64 64)" \
-e REDIS_URL=redis://redis:6379/0 \
-e CELERY_BROKER_URL=redis://redis:6379/0 \
@@ -43,10 +43,10 @@ docker compose -f local.yml run \
django python manage.py check --settings=config.settings.production --deploy --database default --fail-level WARNING
# Generate the HTML for the documentation
-docker compose -f local.yml run docs make html
+docker compose -f docker-compose.docs.yml run docs make html
# Run npm build script if package.json is present
if [ -f "package.json" ]
then
- docker compose -f local.yml run node npm run build
+ docker compose -f docker-compose.local.yml run node npm run build
fi
diff --git a/tox.ini b/tox.ini
index 903d5a53b..3b7a95088 100644
--- a/tox.ini
+++ b/tox.ini
@@ -1,6 +1,6 @@
[tox]
skipsdist = true
-envlist = py311,black-template
+envlist = py312,black-template
[testenv]
deps = -rrequirements.txt
diff --git a/{{cookiecutter.project_slug}}/.devcontainer/devcontainer.json b/{{cookiecutter.project_slug}}/.devcontainer/devcontainer.json
index e16d06a20..5604b8a85 100644
--- a/{{cookiecutter.project_slug}}/.devcontainer/devcontainer.json
+++ b/{{cookiecutter.project_slug}}/.devcontainer/devcontainer.json
@@ -2,7 +2,7 @@
{
"name": "{{cookiecutter.project_slug}}_dev",
"dockerComposeFile": [
- "../local.yml"
+ "../docker-compose.local.yml"
],
"init": true,
"mounts": [
diff --git a/{{cookiecutter.project_slug}}/.drone.yml b/{{cookiecutter.project_slug}}/.drone.yml
index dc08bfbab..d6c13e62b 100644
--- a/{{cookiecutter.project_slug}}/.drone.yml
+++ b/{{cookiecutter.project_slug}}/.drone.yml
@@ -13,7 +13,7 @@ environment:
steps:
- name: lint
pull: if-not-exists
- image: python:3.11
+ image: python:3.12
environment:
PRE_COMMIT_HOME: ${CI_PROJECT_DIR}/.cache/pre-commit
volumes:
@@ -27,16 +27,17 @@ steps:
- name: test
pull: if-not-exists
{%- if cookiecutter.use_docker == 'y' %}
- image: docker/compose:1.29.2
+ image: docker:25.0
environment:
DATABASE_URL: pgsql://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres/$POSTGRES_DB
commands:
- - docker-compose -f local.yml build
- - docker-compose -f local.yml run --rm django python manage.py migrate
- - docker-compose -f local.yml up -d
- - docker-compose -f local.yml run django pytest
+ - docker-compose -f docker-compose.local.yml build
+ - docker-compose -f docker-compose.docs.yml build
+ - docker-compose -f docker-compose.local.yml run --rm django python manage.py migrate
+ - docker-compose -f docker-compose.local.yml up -d
+ - docker-compose -f docker-compose.local.yml run django pytest
{%- else %}
- image: python:3.11
+ image: python:3.12
commands:
- pip install -r requirements/local.txt
- pytest
diff --git a/{{cookiecutter.project_slug}}/.envs/.production/.django b/{{cookiecutter.project_slug}}/.envs/.production/.django
index ad652c9ad..07ffd7112 100644
--- a/{{cookiecutter.project_slug}}/.envs/.production/.django
+++ b/{{cookiecutter.project_slug}}/.envs/.production/.django
@@ -28,8 +28,8 @@ POSTMARK_SERVER_TOKEN=
SENDGRID_API_KEY=
SENDGRID_GENERATE_MESSAGE_ID=True
SENDGRID_MERGE_FIELD_FORMAT=None
-{% elif cookiecutter.mail_service == 'SendinBlue' %}
-SENDINBLUE_API_KEY=
+{% elif cookiecutter.mail_service == 'Brevo' %}
+BREVO_API_KEY=
{% elif cookiecutter.mail_service == 'SparkPost' %}
SPARKPOST_API_KEY=
{% endif %}
diff --git a/{{cookiecutter.project_slug}}/.github/workflows/ci.yml b/{{cookiecutter.project_slug}}/.github/workflows/ci.yml
index 414ee1e60..5cb9ead4f 100644
--- a/{{cookiecutter.project_slug}}/.github/workflows/ci.yml
+++ b/{{cookiecutter.project_slug}}/.github/workflows/ci.yml
@@ -26,15 +26,15 @@ jobs:
uses: actions/checkout@v4
- name: Set up Python
- uses: actions/setup-python@v4
+ uses: actions/setup-python@v5
with:
- python-version: '3.11'
+ python-version: '3.12'
{%- if cookiecutter.open_source_license != 'Not open source' %}
# Consider using pre-commit.ci for open source project
{%- endif %}
- name: Run pre-commit
- uses: pre-commit/action@v3.0.0
+ uses: pre-commit/action@v3.0.1
# With no caching at all the entire ci process takes 3m to complete!
pytest:
@@ -69,22 +69,25 @@ jobs:
{%- if cookiecutter.use_docker == 'y' %}
- name: Build the Stack
- run: docker compose -f local.yml build django
+ run: docker compose -f docker-compose.local.yml build django
+
+ - name: Build the docs
+ run: docker compose -f docker-compose.docs.yml build docs
- name: Run DB Migrations
- run: docker compose -f local.yml run --rm django python manage.py migrate
+ run: docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
- name: Run Django Tests
- run: docker compose -f local.yml run django pytest
+ run: docker compose -f docker-compose.local.yml run django pytest
- name: Tear down the Stack
- run: docker compose -f local.yml down
+ run: docker compose -f docker-compose.local.yml down
{%- else %}
- name: Set up Python
uses: actions/setup-python@v4
with:
- python-version: '3.11'
+ python-version: '3.12'
cache: pip
cache-dependency-path: |
requirements/base.txt
diff --git a/{{cookiecutter.project_slug}}/.gitlab-ci.yml b/{{cookiecutter.project_slug}}/.gitlab-ci.yml
index 350212003..71216bc7a 100644
--- a/{{cookiecutter.project_slug}}/.gitlab-ci.yml
+++ b/{{cookiecutter.project_slug}}/.gitlab-ci.yml
@@ -13,7 +13,7 @@ variables:
precommit:
stage: lint
- image: python:3.11
+ image: python:3.12
variables:
PRE_COMMIT_HOME: ${CI_PROJECT_DIR}/.cache/pre-commit
cache:
@@ -27,22 +27,19 @@ precommit:
pytest:
stage: test
{%- if cookiecutter.use_docker == 'y' %}
- image: docker/compose:1.29.2
- tags:
- - docker
+ image: docker:25.0
services:
- docker:dind
before_script:
- - docker compose -f local.yml build
+ - docker compose -f docker-compose.local.yml build
+ - docker compose -f docker-compose.docs.yml build
# Ensure celerybeat does not crash due to non-existent tables
- - docker compose -f local.yml run --rm django python manage.py migrate
- - docker compose -f local.yml up -d
+ - docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
+ - docker compose -f docker-compose.local.yml up -d
script:
- - docker compose -f local.yml run django pytest
+ - docker compose -f docker-compose.local.yml run django pytest
{%- else %}
- image: python:3.11
- tags:
- - python
+ image: python:3.12
services:
- postgres:{{ cookiecutter.postgresql_version }}
variables:
diff --git a/{{cookiecutter.project_slug}}/.idea/runConfigurations/docker_compose_up_django.xml b/{{cookiecutter.project_slug}}/.idea/runConfigurations/docker_compose_up_django.xml
index e84c5ffdd..0eabd93c8 100644
--- a/{{cookiecutter.project_slug}}/.idea/runConfigurations/docker_compose_up_django.xml
+++ b/{{cookiecutter.project_slug}}/.idea/runConfigurations/docker_compose_up_django.xml
@@ -15,7 +15,7 @@
{%- endif %}
-
+
diff --git a/{{cookiecutter.project_slug}}/.idea/runConfigurations/docker_compose_up_docs.xml b/{{cookiecutter.project_slug}}/.idea/runConfigurations/docker_compose_up_docs.xml
index 0f77b28df..6134cf917 100644
--- a/{{cookiecutter.project_slug}}/.idea/runConfigurations/docker_compose_up_docs.xml
+++ b/{{cookiecutter.project_slug}}/.idea/runConfigurations/docker_compose_up_docs.xml
@@ -8,7 +8,7 @@
-
+
diff --git a/{{cookiecutter.project_slug}}/.pre-commit-config.yaml b/{{cookiecutter.project_slug}}/.pre-commit-config.yaml
index d95f5390d..956ca8d00 100644
--- a/{{cookiecutter.project_slug}}/.pre-commit-config.yaml
+++ b/{{cookiecutter.project_slug}}/.pre-commit-config.yaml
@@ -2,11 +2,11 @@ exclude: '^docs/|/migrations/|devcontainer.json'
default_stages: [commit]
default_language_version:
- python: python3.11
+ python: python3.12
repos:
- repo: https://github.com/pre-commit/pre-commit-hooks
- rev: v4.5.0
+ rev: v4.6.0
hooks:
- id: trailing-whitespace
- id: end-of-file-fixer
@@ -28,14 +28,14 @@ repos:
exclude: '{{cookiecutter.project_slug}}/templates/'
- repo: https://github.com/adamchainz/django-upgrade
- rev: '1.16.0'
+ rev: '1.19.0'
hooks:
- id: django-upgrade
args: ['--target-version', '4.2']
# Run the Ruff linter.
- repo: https://github.com/astral-sh/ruff-pre-commit
- rev: v0.3.0
+ rev: v0.5.2
hooks:
# Linter
- id: ruff
diff --git a/{{cookiecutter.project_slug}}/.readthedocs.yml b/{{cookiecutter.project_slug}}/.readthedocs.yml
index d5a8ef661..556438876 100644
--- a/{{cookiecutter.project_slug}}/.readthedocs.yml
+++ b/{{cookiecutter.project_slug}}/.readthedocs.yml
@@ -8,7 +8,7 @@ version: 2
build:
os: ubuntu-22.04
tools:
- python: '3.11'
+ python: '3.12'
# Build documentation in the docs/ directory with Sphinx
sphinx:
diff --git a/{{cookiecutter.project_slug}}/.travis.yml b/{{cookiecutter.project_slug}}/.travis.yml
index 78709191a..97f9f60a2 100644
--- a/{{cookiecutter.project_slug}}/.travis.yml
+++ b/{{cookiecutter.project_slug}}/.travis.yml
@@ -2,7 +2,7 @@ dist: focal
language: python
python:
- - "3.11"
+ - "3.12"
services:
- {% if cookiecutter.use_docker == 'y' %}docker{% else %}postgresql{% endif %}
@@ -19,14 +19,15 @@ jobs:
before_script:
- docker compose -v
- docker -v
- - docker compose -f local.yml build
+ - docker compose -f docker-compose.local.yml build
+ - docker compose -f docker-compose.docs.yml build
# Ensure celerybeat does not crash due to non-existent tables
- - docker compose -f local.yml run --rm django python manage.py migrate
- - docker compose -f local.yml up -d
+ - docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
+ - docker compose -f docker-compose.local.yml up -d
script:
- - docker compose -f local.yml run django pytest
+ - docker compose -f docker-compose.local.yml run django pytest
after_failure:
- - docker compose -f local.yml logs
+ - docker compose -f docker-compose.local.yml logs
{%- else %}
before_install:
- sudo apt-get update -qq
@@ -37,7 +38,7 @@ jobs:
- sudo apt-get install -qq libsqlite3-dev libxml2 libxml2-dev libssl-dev libbz2-dev wget curl llvm
language: python
python:
- - "3.11"
+ - "3.12"
install:
- pip install -r requirements/local.txt
script:
diff --git a/{{cookiecutter.project_slug}}/Procfile b/{{cookiecutter.project_slug}}/Procfile
index 2f2fbe927..6424e048d 100644
--- a/{{cookiecutter.project_slug}}/Procfile
+++ b/{{cookiecutter.project_slug}}/Procfile
@@ -1,6 +1,6 @@
release: python manage.py migrate
{%- if cookiecutter.use_async == "y" %}
-web: gunicorn config.asgi:application -k uvicorn.workers.UvicornWorker
+web: gunicorn config.asgi:application -k uvicorn_worker.UvicornWorker
{%- else %}
web: gunicorn config.wsgi:application
{%- endif %}
diff --git a/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile b/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile
index 75d5cbb9b..26a21c938 100644
--- a/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile
+++ b/{{cookiecutter.project_slug}}/compose/local/django/Dockerfile
@@ -1,8 +1,8 @@
# define an alias for the specific python version used in this file.
-FROM docker.io/python:3.11.8-slim-bookworm as python
+FROM docker.io/python:3.12.4-slim-bookworm AS python
# Python build stage
-FROM docker.io/python as python-build-stage
+FROM python AS python-build-stage
ARG BUILD_ENVIRONMENT=local
@@ -10,7 +10,7 @@ ARG BUILD_ENVIRONMENT=local
RUN apt-get update && apt-get install --no-install-recommends -y \
# dependencies for building Python packages
build-essential \
- # psycopg2 dependencies
+ # psycopg dependencies
libpq-dev
# Requirements are installed here to ensure they will be cached.
@@ -22,7 +22,7 @@ RUN pip wheel --wheel-dir /usr/src/app/wheels \
# Python 'run' stage
-FROM docker.io/python as python-run-stage
+FROM python AS python-run-stage
ARG BUILD_ENVIRONMENT=local
ARG APP_HOME=/app
@@ -47,7 +47,7 @@ RUN groupadd --gid 1000 dev-user \
# Install required system dependencies
RUN apt-get update && apt-get install --no-install-recommends -y \
- # psycopg2 dependencies
+ # psycopg dependencies
libpq-dev \
# Translations dependencies
gettext \
diff --git a/{{cookiecutter.project_slug}}/compose/local/django/celery/flower/start b/{{cookiecutter.project_slug}}/compose/local/django/celery/flower/start
index b4783d2f0..cebb62203 100644
--- a/{{cookiecutter.project_slug}}/compose/local/django/celery/flower/start
+++ b/{{cookiecutter.project_slug}}/compose/local/django/celery/flower/start
@@ -3,6 +3,14 @@
set -o errexit
set -o nounset
+
+until timeout 10 celery -A config.celery_app inspect ping; do
+ >&2 echo "Celery workers not available"
+done
+
+echo 'Starting flower'
+
+
exec watchfiles --filter python celery.__main__.main \
--args \
"-A config.celery_app -b \"${CELERY_BROKER_URL}\" flower --basic_auth=\"${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}\""
diff --git a/{{cookiecutter.project_slug}}/compose/local/docs/Dockerfile b/{{cookiecutter.project_slug}}/compose/local/docs/Dockerfile
index 87a1b2465..54e209886 100644
--- a/{{cookiecutter.project_slug}}/compose/local/docs/Dockerfile
+++ b/{{cookiecutter.project_slug}}/compose/local/docs/Dockerfile
@@ -1,16 +1,16 @@
# define an alias for the specific python version used in this file.
-FROM docker.io/python:3.11.8-slim-bookworm as python
+FROM docker.io/python:3.12.4-slim-bookworm AS python
# Python build stage
-FROM docker.io/python as python-build-stage
+FROM python AS python-build-stage
ENV PYTHONDONTWRITEBYTECODE 1
RUN apt-get update && apt-get install --no-install-recommends -y \
# dependencies for building Python packages
build-essential \
- # psycopg2 dependencies
+ # psycopg dependencies
libpq-dev \
# cleaning up unused files
&& apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
@@ -26,7 +26,7 @@ RUN pip wheel --no-cache-dir --wheel-dir /usr/src/app/wheels \
# Python 'run' stage
-FROM docker.io/python as python-run-stage
+FROM python AS python-run-stage
ARG BUILD_ENVIRONMENT
ENV PYTHONUNBUFFERED 1
@@ -35,7 +35,7 @@ ENV PYTHONDONTWRITEBYTECODE 1
RUN apt-get update && apt-get install --no-install-recommends -y \
# To run the Makefile
make \
- # psycopg2 dependencies
+ # psycopg dependencies
libpq-dev \
# Translations dependencies
gettext \
diff --git a/{{cookiecutter.project_slug}}/compose/production/aws/Dockerfile b/{{cookiecutter.project_slug}}/compose/production/aws/Dockerfile
index 36eea7f8c..9c7b8a69a 100644
--- a/{{cookiecutter.project_slug}}/compose/production/aws/Dockerfile
+++ b/{{cookiecutter.project_slug}}/compose/production/aws/Dockerfile
@@ -1,4 +1,7 @@
-FROM docker.io/garland/aws-cli-docker:1.16.140
+FROM docker.io/amazon/aws-cli:2.17.0
+
+# Clear entrypoint from the base image, otherwise it's always calling the aws CLI
+ENTRYPOINT []
COPY ./compose/production/aws/maintenance /usr/local/bin/maintenance
COPY ./compose/production/postgres/maintenance/_sourced /usr/local/bin/maintenance/_sourced
diff --git a/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/download b/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/download
index 9561d917a..12871a773 100644
--- a/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/download
+++ b/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/download
@@ -3,7 +3,7 @@
### Download a file from your Amazon S3 bucket to the postgres /backups folder
###
### Usage:
-### $ docker compose -f production.yml run --rm awscli <1>
+### $ docker compose -f docker-compose.production.yml run --rm awscli <1>
set -o errexit
set -o pipefail
diff --git a/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/upload b/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/upload
index 73c1b9bec..2f577824e 100644
--- a/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/upload
+++ b/{{cookiecutter.project_slug}}/compose/production/aws/maintenance/upload
@@ -3,7 +3,7 @@
### Upload the /backups folder to Amazon S3
###
### Usage:
-### $ docker compose -f production.yml run --rm awscli upload
+### $ docker compose -f docker-compose.production.yml run --rm awscli upload
set -o errexit
set -o pipefail
diff --git a/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile b/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile
index 8c000016a..ee3b9994a 100644
--- a/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile
+++ b/{{cookiecutter.project_slug}}/compose/production/django/Dockerfile
@@ -1,5 +1,5 @@
{% if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] -%}
-FROM docker.io/node:20-bookworm-slim as client-builder
+FROM docker.io/node:20-bookworm-slim AS client-builder
ARG APP_HOME=/app
WORKDIR ${APP_HOME}
@@ -25,10 +25,10 @@ RUN npm run build
{%- endif %}
# define an alias for the specific python version used in this file.
-FROM docker.io/python:3.11.8-slim-bookworm as python
+FROM docker.io/python:3.12.4-slim-bookworm AS python
# Python build stage
-FROM docker.io/python as python-build-stage
+FROM python AS python-build-stage
ARG BUILD_ENVIRONMENT=production
@@ -36,7 +36,7 @@ ARG BUILD_ENVIRONMENT=production
RUN apt-get update && apt-get install --no-install-recommends -y \
# dependencies for building Python packages
build-essential \
- # psycopg2 dependencies
+ # psycopg dependencies
libpq-dev
# Requirements are installed here to ensure they will be cached.
@@ -48,7 +48,7 @@ RUN pip wheel --wheel-dir /usr/src/app/wheels \
# Python 'run' stage
-FROM docker.io/python as python-run-stage
+FROM python AS python-run-stage
ARG BUILD_ENVIRONMENT=production
ARG APP_HOME=/app
@@ -65,7 +65,7 @@ RUN addgroup --system django \
# Install required system dependencies
RUN apt-get update && apt-get install --no-install-recommends -y \
- # psycopg2 dependencies
+ # psycopg dependencies
libpq-dev \
# Translations dependencies
gettext \
diff --git a/{{cookiecutter.project_slug}}/compose/production/django/celery/flower/start b/{{cookiecutter.project_slug}}/compose/production/django/celery/flower/start
index 4180d6778..f903a05a2 100644
--- a/{{cookiecutter.project_slug}}/compose/production/django/celery/flower/start
+++ b/{{cookiecutter.project_slug}}/compose/production/django/celery/flower/start
@@ -4,6 +4,14 @@ set -o errexit
set -o nounset
+
+until timeout 10 celery -A config.celery_app inspect ping; do
+ >&2 echo "Celery workers not available"
+done
+
+echo 'Starting flower'
+
+
exec celery \
-A config.celery_app \
-b "${CELERY_BROKER_URL}" \
diff --git a/{{cookiecutter.project_slug}}/compose/production/django/start b/{{cookiecutter.project_slug}}/compose/production/django/start
index 73f686bd7..38fc29b55 100644
--- a/{{cookiecutter.project_slug}}/compose/production/django/start
+++ b/{{cookiecutter.project_slug}}/compose/production/django/start
@@ -28,7 +28,7 @@ if compress_enabled; then
fi
{%- endif %}
{%- if cookiecutter.use_async == 'y' %}
-exec /usr/local/bin/gunicorn config.asgi --bind 0.0.0.0:5000 --chdir=/app -k uvicorn.workers.UvicornWorker
+exec /usr/local/bin/gunicorn config.asgi --bind 0.0.0.0:5000 --chdir=/app -k uvicorn_worker.UvicornWorker
{%- else %}
exec /usr/local/bin/gunicorn config.wsgi --bind 0.0.0.0:5000 --chdir=/app
{%- endif %}
diff --git a/{{cookiecutter.project_slug}}/compose/production/traefik/Dockerfile b/{{cookiecutter.project_slug}}/compose/production/traefik/Dockerfile
index ea918e911..d54bf27ca 100644
--- a/{{cookiecutter.project_slug}}/compose/production/traefik/Dockerfile
+++ b/{{cookiecutter.project_slug}}/compose/production/traefik/Dockerfile
@@ -1,4 +1,4 @@
-FROM docker.io/traefik:2.11.0
+FROM docker.io/traefik:2.11.2
RUN mkdir -p /etc/traefik/acme \
&& touch /etc/traefik/acme/acme.json \
&& chmod 600 /etc/traefik/acme/acme.json
diff --git a/{{cookiecutter.project_slug}}/config/settings/base.py b/{{cookiecutter.project_slug}}/config/settings/base.py
index b7eb7e80f..f1039b748 100644
--- a/{{cookiecutter.project_slug}}/config/settings/base.py
+++ b/{{cookiecutter.project_slug}}/config/settings/base.py
@@ -372,6 +372,7 @@ SPECTACULAR_SETTINGS = {
"DESCRIPTION": "Documentation of API endpoints of {{ cookiecutter.project_name }}",
"VERSION": "1.0.0",
"SERVE_PERMISSIONS": ["rest_framework.permissions.IsAdminUser"],
+ "SCHEMA_PATH_PREFIX": "/api/",
}
{%- endif %}
{%- if cookiecutter.frontend_pipeline == 'Webpack' %}
diff --git a/{{cookiecutter.project_slug}}/config/settings/local.py b/{{cookiecutter.project_slug}}/config/settings/local.py
index f1edb514b..f63151239 100644
--- a/{{cookiecutter.project_slug}}/config/settings/local.py
+++ b/{{cookiecutter.project_slug}}/config/settings/local.py
@@ -64,7 +64,12 @@ INSTALLED_APPS += ["debug_toolbar"]
MIDDLEWARE += ["debug_toolbar.middleware.DebugToolbarMiddleware"]
# https://django-debug-toolbar.readthedocs.io/en/latest/configuration.html#debug-toolbar-config
DEBUG_TOOLBAR_CONFIG = {
- "DISABLE_PANELS": ["debug_toolbar.panels.redirects.RedirectsPanel"],
+ "DISABLE_PANELS": [
+ "debug_toolbar.panels.redirects.RedirectsPanel",
+ # Disable profiling panel due to an issue with Python 3.12:
+ # https://github.com/jazzband/django-debug-toolbar/issues/1875
+ "debug_toolbar.panels.profiling.ProfilingPanel",
+ ],
"SHOW_TEMPLATE_CONTEXT": True,
}
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#internal-ips
@@ -83,6 +88,15 @@ if env("USE_DOCKER") == "yes":
# The node container isn't started (yet?)
pass
{%- endif %}
+ {%- if cookiecutter.windows == 'y' %}
+ # RunServerPlus
+ # ------------------------------------------------------------------------------
+ # This is a custom setting for RunServerPlus to fix the reloader issue in Windows Docker environments
+ # Werkzeug reloader type [auto, watchdog, or stat]
+ RUNSERVERPLUS_POLLER_RELOADER_TYPE = 'stat'
+ # If you have CPU and IO load issues, you can increase this poller interval e.g) 5
+ RUNSERVERPLUS_POLLER_RELOADER_INTERVAL = 1
+ {%- endif %}
{%- endif %}
# django-extensions
diff --git a/{{cookiecutter.project_slug}}/config/settings/production.py b/{{cookiecutter.project_slug}}/config/settings/production.py
index 0cebe1d96..706d04e0a 100644
--- a/{{cookiecutter.project_slug}}/config/settings/production.py
+++ b/{{cookiecutter.project_slug}}/config/settings/production.py
@@ -55,8 +55,12 @@ SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-secure
SESSION_COOKIE_SECURE = True
+# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-name
+SESSION_COOKIE_NAME = "__Secure-sessionid"
# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-secure
CSRF_COOKIE_SECURE = True
+# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-name
+CSRF_COOKIE_NAME = "__Secure-csrftoken"
# https://docs.djangoproject.com/en/dev/topics/security/#ssl-https
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-seconds
# TODO: set this to 60 seconds first and then to 518400 once you prove the former works
@@ -118,7 +122,7 @@ AZURE_CONTAINER = env("DJANGO_AZURE_CONTAINER_NAME")
# STATIC & MEDIA
# ------------------------
STORAGES = {
-{%- if cookiecutter.use_whitenoise == 'y' %}
+{%- if cookiecutter.use_whitenoise == 'y' and cookiecutter.cloud_provider == 'None' %}
"default": {
"BACKEND": "django.core.files.storage.FileSystemStorage",
},
@@ -133,6 +137,11 @@ STORAGES = {
"file_overwrite": False,
},
},
+ {%- if cookiecutter.use_whitenoise == 'y' %}
+ "staticfiles": {
+ "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
+ },
+ {%- else %}
"staticfiles": {
"BACKEND": "storages.backends.s3.S3Storage",
"OPTIONS": {
@@ -140,6 +149,7 @@ STORAGES = {
"default_acl": "public-read",
},
},
+ {%- endif %}
{%- elif cookiecutter.cloud_provider == 'GCP' %}
"default": {
"BACKEND": "storages.backends.gcloud.GoogleCloudStorage",
@@ -148,6 +158,11 @@ STORAGES = {
"file_overwrite": False,
},
},
+ {%- if cookiecutter.use_whitenoise == 'y' %}
+ "staticfiles": {
+ "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
+ },
+ {%- else %}
"staticfiles": {
"BACKEND": "storages.backends.gcloud.GoogleCloudStorage",
"OPTIONS": {
@@ -155,6 +170,7 @@ STORAGES = {
"default_acl": "publicRead",
},
},
+ {%- endif %}
{%- elif cookiecutter.cloud_provider == 'Azure' %}
"default": {
"BACKEND": "storages.backends.azure_storage.AzureStorage",
@@ -163,28 +179,40 @@ STORAGES = {
"file_overwrite": False,
},
},
+ {%- if cookiecutter.use_whitenoise == 'y' %}
+ "staticfiles": {
+ "BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
+ },
+ {%- else %}
"staticfiles": {
"BACKEND": "storages.backends.azure_storage.AzureStorage",
"OPTIONS": {
"location": "static",
},
},
+ {%- endif %}
{%- endif %}
}
{%- endif %}
{%- if cookiecutter.cloud_provider == 'AWS' %}
MEDIA_URL = f"https://{aws_s3_domain}/media/"
-COLLECTFAST_STRATEGY = "collectfast.strategies.boto3.Boto3Strategy"
+{%- if cookiecutter.use_whitenoise == 'n' %}
+COLLECTFASTA_STRATEGY = "collectfasta.strategies.boto3.Boto3Strategy"
STATIC_URL = f"https://{aws_s3_domain}/static/"
+{%- endif %}
{%- elif cookiecutter.cloud_provider == 'GCP' %}
MEDIA_URL = f"https://storage.googleapis.com/{GS_BUCKET_NAME}/media/"
-COLLECTFAST_STRATEGY = "collectfast.strategies.gcloud.GoogleCloudStrategy"
+{%- if cookiecutter.use_whitenoise == 'n' %}
+COLLECTFASTA_STRATEGY = "collectfasta.strategies.gcloud.GoogleCloudStrategy"
STATIC_URL = f"https://storage.googleapis.com/{GS_BUCKET_NAME}/static/"
+{%- endif %}
{%- elif cookiecutter.cloud_provider == 'Azure' %}
MEDIA_URL = f"https://{AZURE_ACCOUNT_NAME}.blob.core.windows.net/media/"
+{%- if cookiecutter.use_whitenoise == 'n' %}
STATIC_URL = f"https://{AZURE_ACCOUNT_NAME}.blob.core.windows.net/static/"
{%- endif %}
+{%- endif %}
# EMAIL
# ------------------------------------------------------------------------------
@@ -252,12 +280,12 @@ ANYMAIL = {
"SENDGRID_API_KEY": env("SENDGRID_API_KEY"),
"SENDGRID_API_URL": env("SENDGRID_API_URL", default="https://api.sendgrid.com/v3/"),
}
-{%- elif cookiecutter.mail_service == 'SendinBlue' %}
-# https://anymail.readthedocs.io/en/stable/esps/sendinblue/
-EMAIL_BACKEND = "anymail.backends.sendinblue.EmailBackend"
+{%- elif cookiecutter.mail_service == 'Brevo' %}
+# https://anymail.readthedocs.io/en/stable/esps/brevo/
+EMAIL_BACKEND = "anymail.backends.brevo.EmailBackend"
ANYMAIL = {
- "SENDINBLUE_API_KEY": env("SENDINBLUE_API_KEY"),
- "SENDINBLUE_API_URL": env("SENDINBLUE_API_URL", default="https://api.sendinblue.com/v3/"),
+ "BREVO_API_KEY": env("BREVO_API_KEY"),
+ "BREVO_API_URL": env("BREVO_API_URL", default="https://api.brevo.com/v3/"),
}
{%- elif cookiecutter.mail_service == 'SparkPost' %}
# https://anymail.readthedocs.io/en/stable/esps/sparkpost/
@@ -285,7 +313,8 @@ COMPRESS_STORAGE = "compressor.storage.GzipCompressorFileStorage"
COMPRESS_STORAGE = STORAGES["staticfiles"]["BACKEND"]
{%- endif %}
# https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_URL
-COMPRESS_URL = STATIC_URL{% if cookiecutter.use_whitenoise == 'y' or cookiecutter.cloud_provider == 'None' %}{% endif %}
+COMPRESS_URL = STATIC_URL{% if cookiecutter.use_whitenoise == 'y' or cookiecutter.cloud_provider == 'None' %} # noqa: F405
+{%- endif -%}
{%- if cookiecutter.use_whitenoise == 'y' %}
# https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_OFFLINE
COMPRESS_OFFLINE = True # Offline compression is required when using Whitenoise
@@ -300,10 +329,10 @@ COMPRESS_FILTERS = {
}
{% endif %}
{%- if cookiecutter.use_whitenoise == 'n' -%}
-# Collectfast
+# Collectfasta
# ------------------------------------------------------------------------------
-# https://github.com/antonagestam/collectfast#installation
-INSTALLED_APPS = ["collectfast", *INSTALLED_APPS]
+# https://github.com/jasongi/collectfasta#installation
+INSTALLED_APPS = ["collectfasta", *INSTALLED_APPS]
{% endif %}
# LOGGING
# ------------------------------------------------------------------------------
diff --git a/{{cookiecutter.project_slug}}/config/urls.py b/{{cookiecutter.project_slug}}/config/urls.py
index 5d9301b67..aca4352e6 100644
--- a/{{cookiecutter.project_slug}}/config/urls.py
+++ b/{{cookiecutter.project_slug}}/config/urls.py
@@ -43,7 +43,7 @@ urlpatterns += [
# API base url
path("api/", include("config.api_router")),
# DRF auth token
- path("auth-token/", obtain_auth_token),
+ path("api/auth-token/", obtain_auth_token),
path("api/schema/", SpectacularAPIView.as_view(), name="api-schema"),
path(
"api/docs/",
diff --git a/{{cookiecutter.project_slug}}/docker-compose.docs.yml b/{{cookiecutter.project_slug}}/docker-compose.docs.yml
new file mode 100644
index 000000000..215b6c3b7
--- /dev/null
+++ b/{{cookiecutter.project_slug}}/docker-compose.docs.yml
@@ -0,0 +1,16 @@
+services:
+ docs:
+ image: {{ cookiecutter.project_slug }}_local_docs
+ container_name: {{ cookiecutter.project_slug }}_local_docs
+ build:
+ context: .
+ dockerfile: ./compose/local/docs/Dockerfile
+ env_file:
+ - ./.envs/.local/.django
+ volumes:
+ - ./docs:/docs:z
+ - ./config:/app/config:z
+ - ./{{ cookiecutter.project_slug }}:/app/{{ cookiecutter.project_slug }}:z
+ ports:
+ - '9000:9000'
+ command: /start-docs
diff --git a/{{cookiecutter.project_slug}}/local.yml b/{{cookiecutter.project_slug}}/docker-compose.local.yml
similarity index 87%
rename from {{cookiecutter.project_slug}}/local.yml
rename to {{cookiecutter.project_slug}}/docker-compose.local.yml
index d924b739f..eced08ee8 100644
--- a/{{cookiecutter.project_slug}}/local.yml
+++ b/{{cookiecutter.project_slug}}/docker-compose.local.yml
@@ -1,8 +1,7 @@
-version: '3'
-
volumes:
{{ cookiecutter.project_slug }}_local_postgres_data: {}
{{ cookiecutter.project_slug }}_local_postgres_data_backups: {}
+ {% if cookiecutter.use_celery == 'y' %}{{ cookiecutter.project_slug }}_local_redis_data: {}{% endif %}
services:
django:{% if cookiecutter.use_celery == 'y' %} &django{% endif %}
@@ -40,21 +39,6 @@ services:
env_file:
- ./.envs/.local/.postgres
- docs:
- image: {{ cookiecutter.project_slug }}_local_docs
- container_name: {{ cookiecutter.project_slug }}_local_docs
- build:
- context: .
- dockerfile: ./compose/local/docs/Dockerfile
- env_file:
- - ./.envs/.local/.django
- volumes:
- - ./docs:/docs:z
- - ./config:/app/config:z
- - ./{{ cookiecutter.project_slug }}:/app/{{ cookiecutter.project_slug }}:z
- ports:
- - '9000:9000'
- command: /start-docs
{%- if cookiecutter.use_mailpit == 'y' %}
mailpit:
@@ -69,6 +53,10 @@ services:
redis:
image: docker.io/redis:6
container_name: {{ cookiecutter.project_slug }}_local_redis
+ {% if cookiecutter.use_celery == 'y' %}
+ volumes:
+ - {{ cookiecutter.project_slug }}_local_redis_data:/data
+ {% endif %}
celeryworker:
<<: *django
diff --git a/{{cookiecutter.project_slug}}/production.yml b/{{cookiecutter.project_slug}}/docker-compose.production.yml
similarity index 92%
rename from {{cookiecutter.project_slug}}/production.yml
rename to {{cookiecutter.project_slug}}/docker-compose.production.yml
index f7bf5284f..d0d06338d 100644
--- a/{{cookiecutter.project_slug}}/production.yml
+++ b/{{cookiecutter.project_slug}}/docker-compose.production.yml
@@ -1,5 +1,3 @@
-version: '3'
-
volumes:
production_postgres_data: {}
production_postgres_data_backups: {}
@@ -7,6 +5,10 @@ volumes:
{%- if cookiecutter.cloud_provider == 'None' %}
production_django_media: {}
{%- endif %}
+ {% if cookiecutter.use_celery == 'y' %}
+ production_redis_data: {}
+ {% endif %}
+
services:
django:{% if cookiecutter.use_celery == 'y' %} &django{% endif %}
@@ -68,6 +70,12 @@ services:
redis:
image: docker.io/redis:6
+ {% if cookiecutter.use_celery == 'y' %}
+ volumes:
+ - production_redis_data:/data
+ {% endif %}
+
+
{%- if cookiecutter.use_celery == 'y' %}
celeryworker:
@@ -102,7 +110,7 @@ services:
build:
context: .
dockerfile: ./compose/production/nginx/Dockerfile
- image: {{ cookiecutter.project_slug }}_local_nginx
+ image: {{ cookiecutter.project_slug }}_production_nginx
depends_on:
- django
volumes:
diff --git a/{{cookiecutter.project_slug}}/docs/howto.rst b/{{cookiecutter.project_slug}}/docs/howto.rst
index 7d86351cf..944c2b731 100644
--- a/{{cookiecutter.project_slug}}/docs/howto.rst
+++ b/{{cookiecutter.project_slug}}/docs/howto.rst
@@ -15,7 +15,7 @@ from inside the `{{cookiecutter.project_slug}}/docs` directory.
{% else %}
To build and serve docs, use the commands::
- docker compose -f local.yml up docs
+ docker compose -f docker-compose.local.yml up docs
{% endif %}
@@ -26,7 +26,7 @@ Changes to files in `docs/_source` will be picked up and reloaded automatically.
Docstrings to Documentation
----------------------------------------------------------------------
-The sphinx extension `apidoc `_ is used to automatically document code using signatures and docstrings.
+The sphinx extension `apidoc `_ is used to automatically document code using signatures and docstrings.
Numpy or Google style docstrings will be picked up from project files and available for documentation. See the `Napoleon `_ extension for details.
diff --git a/{{cookiecutter.project_slug}}/docs/pycharm/configuration.rst b/{{cookiecutter.project_slug}}/docs/pycharm/configuration.rst
index d8e769167..148854c64 100644
--- a/{{cookiecutter.project_slug}}/docs/pycharm/configuration.rst
+++ b/{{cookiecutter.project_slug}}/docs/pycharm/configuration.rst
@@ -21,7 +21,7 @@ Next, you have to add new remote python interpreter, based on already tested dep
.. image:: images/3.png
-Switch to *Docker Compose* and select `local.yml` file from directory of your project, next set *Service name* to `django`
+Switch to *Docker Compose* and select `docker-compose.local.yml` file from directory of your project, next set *Service name* to `django`
.. image:: images/4.png
diff --git a/{{cookiecutter.project_slug}}/gulpfile.js b/{{cookiecutter.project_slug}}/gulpfile.js
index df434c134..ee6e70282 100644
--- a/{{cookiecutter.project_slug}}/gulpfile.js
+++ b/{{cookiecutter.project_slug}}/gulpfile.js
@@ -106,7 +106,7 @@ function imgCompression() {
function asyncRunServer() {
const cmd = spawn(
'gunicorn',
- ['config.asgi', '-k', 'uvicorn.workers.UvicornWorker', '--reload'],
+ ['config.asgi', '-k', 'uvicorn_worker.UvicornWorker', '--reload'],
{stdio: 'inherit'},
);
cmd.on('close', function (code) {
diff --git a/{{cookiecutter.project_slug}}/locale/README.md b/{{cookiecutter.project_slug}}/locale/README.md
index a514ad10c..7cb6876ad 100644
--- a/{{cookiecutter.project_slug}}/locale/README.md
+++ b/{{cookiecutter.project_slug}}/locale/README.md
@@ -3,7 +3,7 @@
Start by configuring the `LANGUAGES` settings in `base.py`, by uncommenting languages you are willing to support. Then, translations strings will be placed in this folder when running:
```bash
-{% if cookiecutter.use_docker == 'y' %}docker compose -f local.yml run --rm django {% endif %}python manage.py makemessages -all --no-location
+{% if cookiecutter.use_docker == 'y' %}docker compose -f docker-compose.local.yml run --rm django {% endif %}python manage.py makemessages --all --no-location
```
This should generate `django.po` (stands for Portable Object) files under each locale `/LC_MESSAGES/django.po`. Each translatable string in the codebase is collected with its `msgid` and need to be translated as `msgstr`, for example:
@@ -16,7 +16,7 @@ msgstr "utilisateurs"
Once all translations are done, they need to be compiled into `.mo` files (stands for Machine Object), which are the actual binary files used by the application:
```bash
-{% if cookiecutter.use_docker == 'y' %}docker compose -f local.yml run --rm django {% endif %}python manage.py compilemessages
+{% if cookiecutter.use_docker == 'y' %}docker compose -f docker-compose.local.yml run --rm django {% endif %}python manage.py compilemessages
```
Note that the `.po` files are NOT used by the application directly, so if the `.mo` files are out of dates, the content won't appear as translated even if the `.po` files are up-to-date.
diff --git a/{{cookiecutter.project_slug}}/package.json b/{{cookiecutter.project_slug}}/package.json
index 9ca728208..0b17e5e0b 100644
--- a/{{cookiecutter.project_slug}}/package.json
+++ b/{{cookiecutter.project_slug}}/package.json
@@ -12,7 +12,7 @@
"css-loader": "^6.5.1",
"gulp-concat": "^2.6.1",
"concurrently": "^8.0.1",
- "cssnano": "^6.0.0",
+ "cssnano": "^7.0.0",
"gulp": "^4.0.2",
"gulp-imagemin": "^7.1.0",
"gulp-plumber": "^1.2.1",
diff --git a/{{cookiecutter.project_slug}}/pyproject.toml b/{{cookiecutter.project_slug}}/pyproject.toml
index a056c71c3..31e290dff 100644
--- a/{{cookiecutter.project_slug}}/pyproject.toml
+++ b/{{cookiecutter.project_slug}}/pyproject.toml
@@ -1,7 +1,7 @@
# ==== pytest ====
[tool.pytest.ini_options]
minversion = "6.0"
-addopts = "--ds=config.settings.test --reuse-db"
+addopts = "--ds=config.settings.test --reuse-db --import-mode=importlib"
python_files = [
"tests.py",
"test_*.py",
@@ -18,7 +18,7 @@ plugins = ["django_coverage_plugin"]
# ==== mypy ====
[tool.mypy]
-python_version = "3.11"
+python_version = "3.12"
check_untyped_defs = true
ignore_missing_imports = true
warn_unused_ignores = true
@@ -45,7 +45,7 @@ blank_line_after_tag = "load,extends"
close_void_tags = true
format_css = true
format_js = true
-# TODO: remove T002 when fixed https://github.com/Riverside-Healthcare/djLint/issues/687
+# TODO: remove T002 when fixed https://github.com/djlint/djLint/issues/687
ignore = "H006,H030,H031,T002"
include = "H017,H035"
indent = 2
@@ -88,7 +88,7 @@ exclude = [
# Same as Django: https://github.com/cookiecutter/cookiecutter-django/issues/4792.
line-length = 88
indent-width = 4
-target-version = "py311"
+target-version = "py312"
[tool.ruff.lint]
select = [
@@ -150,11 +150,20 @@ select = [
ignore = [
"S101", # Use of assert detected https://docs.astral.sh/ruff/rules/assert/
"RUF012", # Mutable class attributes should be annotated with `typing.ClassVar`
- "SIM102" # sometimes it's better to nest
+ "SIM102", # sometimes it's better to nest
+ "UP038" # Checks for uses of isinstance/issubclass that take a tuple
+ # of types for comparison.
+ # Deactivated because it can make the code slow:
+ # https://github.com/astral-sh/ruff/issues/7871
]
# Allow fix for all enabled rules (when `--fix`) is provided.
fixable = ["ALL"]
unfixable = []
+# The fixes in extend-unsafe-fixes will require
+# providing the `--unsafe-fixes` flag when fixing.
+extend-unsafe-fixes = [
+ "UP038"
+]
# Allow unused variables when underscore-prefixed.
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
diff --git a/{{cookiecutter.project_slug}}/requirements/base.txt b/{{cookiecutter.project_slug}}/requirements/base.txt
index 3aad4c947..65b49facc 100644
--- a/{{cookiecutter.project_slug}}/requirements/base.txt
+++ b/{{cookiecutter.project_slug}}/requirements/base.txt
@@ -1,50 +1,51 @@
python-slugify==8.0.4 # https://github.com/un33k/python-slugify
-Pillow==10.2.0 # https://github.com/python-pillow/Pillow
+Pillow==10.4.0 # https://github.com/python-pillow/Pillow
{%- if cookiecutter.frontend_pipeline == 'Django Compressor' %}
{%- if cookiecutter.windows == 'y' and cookiecutter.use_docker == 'n' %}
-rcssmin==1.1.0 --install-option="--without-c-extensions" # https://github.com/ndparker/rcssmin
+rcssmin==1.1.2 --install-option="--without-c-extensions" # https://github.com/ndparker/rcssmin
{%- else %}
-rcssmin==1.1.1 # https://github.com/ndparker/rcssmin
+rcssmin==1.1.2 # https://github.com/ndparker/rcssmin
{%- endif %}
{%- endif %}
argon2-cffi==23.1.0 # https://github.com/hynek/argon2_cffi
{%- if cookiecutter.use_whitenoise == 'y' %}
-whitenoise==6.6.0 # https://github.com/evansd/whitenoise
+whitenoise==6.7.0 # https://github.com/evansd/whitenoise
{%- endif %}
-redis==5.0.2 # https://github.com/redis/redis-py
+redis==5.0.7 # https://github.com/redis/redis-py
{%- if cookiecutter.use_docker == "y" or cookiecutter.windows == "n" %}
hiredis==2.3.2 # https://github.com/redis/hiredis-py
{%- endif %}
{%- if cookiecutter.use_celery == "y" %}
-celery==5.3.6 # pyup: < 6.0 # https://github.com/celery/celery
+celery==5.4.0 # pyup: < 6.0 # https://github.com/celery/celery
django-celery-beat==2.6.0 # https://github.com/celery/django-celery-beat
{%- if cookiecutter.use_docker == 'y' %}
flower==2.0.1 # https://github.com/mher/flower
{%- endif %}
{%- endif %}
{%- if cookiecutter.use_async == 'y' %}
-uvicorn[standard]==0.27.1 # https://github.com/encode/uvicorn
+uvicorn[standard]==0.30.1 # https://github.com/encode/uvicorn
+uvicorn-worker==0.2.0 # https://github.com/Kludex/uvicorn-worker
{%- endif %}
# Django
# ------------------------------------------------------------------------------
-django==4.2.11 # pyup: < 5.0 # https://www.djangoproject.com/
+django==4.2.14 # pyup: < 5.0 # https://www.djangoproject.com/
django-environ==0.11.2 # https://github.com/joke2k/django-environ
-django-model-utils==4.4.0 # https://github.com/jazzband/django-model-utils
-django-allauth[mfa]==0.61.1 # https://github.com/pennersr/django-allauth
-django-crispy-forms==2.1 # https://github.com/django-crispy-forms/django-crispy-forms
+django-model-utils==4.5.1 # https://github.com/jazzband/django-model-utils
+django-allauth[mfa]==0.63.6 # https://github.com/pennersr/django-allauth
+django-crispy-forms==2.2 # https://github.com/django-crispy-forms/django-crispy-forms
crispy-bootstrap5==2024.2 # https://github.com/django-crispy-forms/crispy-bootstrap5
{%- if cookiecutter.frontend_pipeline == 'Django Compressor' %}
-django-compressor==4.4 # https://github.com/django-compressor/django-compressor
+django-compressor==4.5 # https://github.com/django-compressor/django-compressor
{%- endif %}
django-redis==5.4.0 # https://github.com/jazzband/django-redis
{%- if cookiecutter.use_drf == 'y' %}
# Django REST Framework
-djangorestframework==3.14.0 # https://github.com/encode/django-rest-framework
-django-cors-headers==4.3.1 # https://github.com/adamchainz/django-cors-headers
+djangorestframework==3.15.2 # https://github.com/encode/django-rest-framework
+django-cors-headers==4.4.0 # https://github.com/adamchainz/django-cors-headers
# DRF-spectacular for api documentation
-drf-spectacular==0.27.1 # https://github.com/tfranzel/drf-spectacular
+drf-spectacular==0.27.2 # https://github.com/tfranzel/drf-spectacular
{%- endif %}
{%- if cookiecutter.frontend_pipeline == 'Webpack' %}
-django-webpack-loader==3.0.1 # https://github.com/django-webpack/django-webpack-loader
+django-webpack-loader==3.1.0 # https://github.com/django-webpack/django-webpack-loader
{%- endif %}
diff --git a/{{cookiecutter.project_slug}}/requirements/local.txt b/{{cookiecutter.project_slug}}/requirements/local.txt
index 4acd2c4dd..6c418272b 100644
--- a/{{cookiecutter.project_slug}}/requirements/local.txt
+++ b/{{cookiecutter.project_slug}}/requirements/local.txt
@@ -1,43 +1,43 @@
-r production.txt
-Werkzeug[watchdog]==3.0.1 # https://github.com/pallets/werkzeug
+Werkzeug[watchdog]==3.0.3 # https://github.com/pallets/werkzeug
ipdb==0.13.13 # https://github.com/gotcha/ipdb
{%- if cookiecutter.use_docker == 'y' %}
-psycopg[c]==3.1.18 # https://github.com/psycopg/psycopg
+psycopg[c]==3.2.1 # https://github.com/psycopg/psycopg
{%- else %}
-psycopg[binary]==3.1.18 # https://github.com/psycopg/psycopg
+psycopg[binary]==3.2.1 # https://github.com/psycopg/psycopg
{%- endif %}
{%- if cookiecutter.use_async == 'y' or cookiecutter.use_celery == 'y' %}
-watchfiles==0.21.0 # https://github.com/samuelcolvin/watchfiles
+watchfiles==0.22.0 # https://github.com/samuelcolvin/watchfiles
{%- endif %}
# Testing
# ------------------------------------------------------------------------------
-mypy==1.7.1 # https://github.com/python/mypy
-django-stubs[compatible-mypy]==4.2.7 # https://github.com/typeddjango/django-stubs
-pytest==8.1.0 # https://github.com/pytest-dev/pytest
+mypy==1.10.0 # https://github.com/python/mypy
+django-stubs[compatible-mypy]==5.0.2 # https://github.com/typeddjango/django-stubs
+pytest==8.2.2 # https://github.com/pytest-dev/pytest
pytest-sugar==1.0.0 # https://github.com/Frozenball/pytest-sugar
{%- if cookiecutter.use_drf == "y" %}
-djangorestframework-stubs[compatible-mypy]==3.14.5 # https://github.com/typeddjango/djangorestframework-stubs
+djangorestframework-stubs[compatible-mypy]==3.15.0 # https://github.com/typeddjango/djangorestframework-stubs
{%- endif %}
# Documentation
# ------------------------------------------------------------------------------
-sphinx==7.2.6 # https://github.com/sphinx-doc/sphinx
-sphinx-autobuild==2024.2.4 # https://github.com/GaretJax/sphinx-autobuild
+sphinx==7.4.5 # https://github.com/sphinx-doc/sphinx
+sphinx-autobuild==2024.4.16 # https://github.com/GaretJax/sphinx-autobuild
# Code quality
# ------------------------------------------------------------------------------
-ruff==0.3.0 # https://github.com/astral-sh/ruff
-coverage==7.4.3 # https://github.com/nedbat/coveragepy
+ruff==0.5.2 # https://github.com/astral-sh/ruff
+coverage==7.6.0 # https://github.com/nedbat/coveragepy
djlint==1.34.1 # https://github.com/Riverside-Healthcare/djLint
-pre-commit==3.6.2 # https://github.com/pre-commit/pre-commit
+pre-commit==3.7.1 # https://github.com/pre-commit/pre-commit
# Django
# ------------------------------------------------------------------------------
factory-boy==3.3.0 # https://github.com/FactoryBoy/factory_boy
-django-debug-toolbar==4.3.0 # https://github.com/jazzband/django-debug-toolbar
+django-debug-toolbar==4.4.6 # https://github.com/jazzband/django-debug-toolbar
django-extensions==3.2.3 # https://github.com/django-extensions/django-extensions
django-coverage-plugin==3.1.0 # https://github.com/nedbat/django_coverage_plugin
pytest-django==4.8.0 # https://github.com/pytest-dev/pytest-django
diff --git a/{{cookiecutter.project_slug}}/requirements/production.txt b/{{cookiecutter.project_slug}}/requirements/production.txt
index d813a8fc4..5fa14841c 100644
--- a/{{cookiecutter.project_slug}}/requirements/production.txt
+++ b/{{cookiecutter.project_slug}}/requirements/production.txt
@@ -2,13 +2,13 @@
-r base.txt
-gunicorn==21.2.0 # https://github.com/benoitc/gunicorn
-psycopg[c]==3.1.18 # https://github.com/psycopg/psycopg
+gunicorn==22.0.0 # https://github.com/benoitc/gunicorn
+psycopg[c]==3.2.1 # https://github.com/psycopg/psycopg
{%- if cookiecutter.use_whitenoise == 'n' %}
-Collectfast==2.2.0 # https://github.com/antonagestam/collectfast
+Collectfasta==3.2.0 # https://github.com/jasongi/collectfasta
{%- endif %}
{%- if cookiecutter.use_sentry == "y" %}
-sentry-sdk==1.40.6 # https://github.com/getsentry/sentry-python
+sentry-sdk==2.10.0 # https://github.com/getsentry/sentry-python
{%- endif %}
{%- if cookiecutter.use_docker == "n" and cookiecutter.windows == "y" %}
hiredis==2.3.2 # https://github.com/redis/hiredis-py
@@ -17,28 +17,28 @@ hiredis==2.3.2 # https://github.com/redis/hiredis-py
# Django
# ------------------------------------------------------------------------------
{%- if cookiecutter.cloud_provider == 'AWS' %}
-django-storages[s3]==1.14.2 # https://github.com/jschneier/django-storages
+django-storages[s3]==1.14.4 # https://github.com/jschneier/django-storages
{%- elif cookiecutter.cloud_provider == 'GCP' %}
-django-storages[google]==1.14.2 # https://github.com/jschneier/django-storages
+django-storages[google]==1.14.4 # https://github.com/jschneier/django-storages
{%- elif cookiecutter.cloud_provider == 'Azure' %}
-django-storages[azure]==1.14.2 # https://github.com/jschneier/django-storages
+django-storages[azure]==1.14.4 # https://github.com/jschneier/django-storages
{%- endif %}
{%- if cookiecutter.mail_service == 'Mailgun' %}
-django-anymail[mailgun]==10.2 # https://github.com/anymail/django-anymail
+django-anymail[mailgun]==11.0.1 # https://github.com/anymail/django-anymail
{%- elif cookiecutter.mail_service == 'Amazon SES' %}
-django-anymail[amazon-ses]==10.2 # https://github.com/anymail/django-anymail
+django-anymail[amazon-ses]==11.0.1 # https://github.com/anymail/django-anymail
{%- elif cookiecutter.mail_service == 'Mailjet' %}
-django-anymail[mailjet]==10.2 # https://github.com/anymail/django-anymail
+django-anymail[mailjet]==11.0.1 # https://github.com/anymail/django-anymail
{%- elif cookiecutter.mail_service == 'Mandrill' %}
-django-anymail[mandrill]==10.2 # https://github.com/anymail/django-anymail
+django-anymail[mandrill]==11.0.1 # https://github.com/anymail/django-anymail
{%- elif cookiecutter.mail_service == 'Postmark' %}
-django-anymail[postmark]==10.2 # https://github.com/anymail/django-anymail
+django-anymail[postmark]==11.0.1 # https://github.com/anymail/django-anymail
{%- elif cookiecutter.mail_service == 'Sendgrid' %}
-django-anymail[sendgrid]==10.2 # https://github.com/anymail/django-anymail
-{%- elif cookiecutter.mail_service == 'SendinBlue' %}
-django-anymail[sendinblue]==10.2 # https://github.com/anymail/django-anymail
+django-anymail[sendgrid]==11.0.1 # https://github.com/anymail/django-anymail
+{%- elif cookiecutter.mail_service == 'Brevo' %}
+django-anymail[brevo]==11.0.1 # https://github.com/anymail/django-anymail
{%- elif cookiecutter.mail_service == 'SparkPost' %}
-django-anymail[sparkpost]==10.2 # https://github.com/anymail/django-anymail
+django-anymail[sparkpost]==11.0.1 # https://github.com/anymail/django-anymail
{%- elif cookiecutter.mail_service == 'Other SMTP' %}
-django-anymail==10.2 # https://github.com/anymail/django-anymail
+django-anymail==11.0.1 # https://github.com/anymail/django-anymail
{%- endif %}
diff --git a/{{cookiecutter.project_slug}}/runtime.txt b/{{cookiecutter.project_slug}}/runtime.txt
index cf3b80423..4ddc7cd66 100644
--- a/{{cookiecutter.project_slug}}/runtime.txt
+++ b/{{cookiecutter.project_slug}}/runtime.txt
@@ -1 +1 @@
-python-3.11.8
+python-3.12.3
diff --git a/{{cookiecutter.project_slug}}/utility/requirements-bionic.apt b/{{cookiecutter.project_slug}}/utility/requirements-bionic.apt
index 1ca82b264..0e1a6572c 100644
--- a/{{cookiecutter.project_slug}}/utility/requirements-bionic.apt
+++ b/{{cookiecutter.project_slug}}/utility/requirements-bionic.apt
@@ -9,7 +9,7 @@ python3-dev
##Pillow, pylibmc
zlib1g-dev
-##Postgresql and psycopg2 dependencies
+##Postgresql and psycopg dependencies
libpq-dev
##Pillow dependencies
diff --git a/{{cookiecutter.project_slug}}/utility/requirements-bookworm.apt b/{{cookiecutter.project_slug}}/utility/requirements-bookworm.apt
index a4910eb6d..f24f6f3da 100644
--- a/{{cookiecutter.project_slug}}/utility/requirements-bookworm.apt
+++ b/{{cookiecutter.project_slug}}/utility/requirements-bookworm.apt
@@ -9,7 +9,7 @@ python3-dev
##Pillow, pylibmc
zlib1g-dev
-##Postgresql and psycopg2 dependencies
+##Postgresql and psycopg dependencies
libpq-dev
##Pillow dependencies
diff --git a/{{cookiecutter.project_slug}}/utility/requirements-bullseye.apt b/{{cookiecutter.project_slug}}/utility/requirements-bullseye.apt
index 60f602873..e8e36b631 100644
--- a/{{cookiecutter.project_slug}}/utility/requirements-bullseye.apt
+++ b/{{cookiecutter.project_slug}}/utility/requirements-bullseye.apt
@@ -9,7 +9,7 @@ python3-dev
##Pillow, pylibmc
zlib1g-dev
-##Postgresql and psycopg2 dependencies
+##Postgresql and psycopg dependencies
libpq-dev
##Pillow dependencies
diff --git a/{{cookiecutter.project_slug}}/utility/requirements-buster.apt b/{{cookiecutter.project_slug}}/utility/requirements-buster.apt
index 75957f40d..f2c81962d 100644
--- a/{{cookiecutter.project_slug}}/utility/requirements-buster.apt
+++ b/{{cookiecutter.project_slug}}/utility/requirements-buster.apt
@@ -9,7 +9,7 @@ python3-dev
##Pillow, pylibmc
zlib1g-dev
-##Postgresql and psycopg2 dependencies
+##Postgresql and psycopg dependencies
libpq-dev
##Pillow dependencies
diff --git a/{{cookiecutter.project_slug}}/utility/requirements-focal.apt b/{{cookiecutter.project_slug}}/utility/requirements-focal.apt
index fe6f21e46..f400b4196 100644
--- a/{{cookiecutter.project_slug}}/utility/requirements-focal.apt
+++ b/{{cookiecutter.project_slug}}/utility/requirements-focal.apt
@@ -9,7 +9,7 @@ python3-dev
##Pillow, pylibmc
zlib1g-dev
-##Postgresql and psycopg2 dependencies
+##Postgresql and psycopg dependencies
libpq-dev
##Pillow dependencies
diff --git a/{{cookiecutter.project_slug}}/utility/requirements-jammy.apt b/{{cookiecutter.project_slug}}/utility/requirements-jammy.apt
index 63d1587e6..ea52472a1 100644
--- a/{{cookiecutter.project_slug}}/utility/requirements-jammy.apt
+++ b/{{cookiecutter.project_slug}}/utility/requirements-jammy.apt
@@ -9,7 +9,7 @@ python3-dev
##Pillow, pylibmc
zlib1g-dev
-##Postgresql and psycopg2 dependencies
+##Postgresql and psycopg dependencies
libpq-dev
##Pillow dependencies
diff --git a/{{cookiecutter.project_slug}}/utility/requirements-jessie.apt b/{{cookiecutter.project_slug}}/utility/requirements-jessie.apt
index 5c49365ba..ebf0e583e 100644
--- a/{{cookiecutter.project_slug}}/utility/requirements-jessie.apt
+++ b/{{cookiecutter.project_slug}}/utility/requirements-jessie.apt
@@ -9,7 +9,7 @@ python3-dev
##Pillow, pylibmc
zlib1g-dev
-##Postgresql and psycopg2 dependencies
+##Postgresql and psycopg dependencies
libpq-dev
##Pillow dependencies
diff --git a/{{cookiecutter.project_slug}}/utility/requirements-stretch.apt b/{{cookiecutter.project_slug}}/utility/requirements-stretch.apt
index a2b3a7e5e..979eca313 100644
--- a/{{cookiecutter.project_slug}}/utility/requirements-stretch.apt
+++ b/{{cookiecutter.project_slug}}/utility/requirements-stretch.apt
@@ -9,7 +9,7 @@ python3-dev
##Pillow, pylibmc
zlib1g-dev
-##Postgresql and psycopg2 dependencies
+##Postgresql and psycopg dependencies
libpq-dev
##Pillow dependencies
diff --git a/{{cookiecutter.project_slug}}/utility/requirements-trusty.apt b/{{cookiecutter.project_slug}}/utility/requirements-trusty.apt
index 455f1a868..954f78375 100644
--- a/{{cookiecutter.project_slug}}/utility/requirements-trusty.apt
+++ b/{{cookiecutter.project_slug}}/utility/requirements-trusty.apt
@@ -9,7 +9,7 @@ python3-dev
##Pillow, pylibmc
zlib1g-dev
-##Postgresql and psycopg2 dependencies
+##Postgresql and psycopg dependencies
libpq-dev
##Pillow dependencies
diff --git a/{{cookiecutter.project_slug}}/utility/requirements-xenial.apt b/{{cookiecutter.project_slug}}/utility/requirements-xenial.apt
index ba84ef167..1784e40c6 100644
--- a/{{cookiecutter.project_slug}}/utility/requirements-xenial.apt
+++ b/{{cookiecutter.project_slug}}/utility/requirements-xenial.apt
@@ -9,7 +9,7 @@ python3-dev
##Pillow, pylibmc
zlib1g-dev
-##Postgresql and psycopg2 dependencies
+##Postgresql and psycopg dependencies
libpq-dev
##Pillow dependencies
diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/admin.py b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/admin.py
index 70f829256..40086de7f 100644
--- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/admin.py
+++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/admin.py
@@ -1,7 +1,7 @@
+from allauth.account.decorators import secure_admin_login
from django.conf import settings
from django.contrib import admin
from django.contrib.auth import admin as auth_admin
-from django.contrib.auth.decorators import login_required
from django.utils.translation import gettext_lazy as _
from .forms import UserAdminChangeForm
@@ -11,7 +11,8 @@ from .models import User
if settings.DJANGO_ADMIN_FORCE_ALLAUTH:
# Force the `admin` sign in process to go through the `django-allauth` workflow:
# https://docs.allauth.org/en/latest/common/admin.html#admin
- admin.site.login = login_required(admin.site.login) # type: ignore[method-assign]
+ admin.autodiscover()
+ admin.site.login = secure_admin_login(admin.site.login) # type: ignore[method-assign]
@admin.register(User)
diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/forms.py b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/forms.py
index 830fca60d..35a70101e 100644
--- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/forms.py
+++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/forms.py
@@ -10,7 +10,7 @@ from .models import User
class UserAdminChangeForm(admin_forms.UserChangeForm):
- class Meta(admin_forms.UserChangeForm.Meta):
+ class Meta(admin_forms.UserChangeForm.Meta): # type: ignore[name-defined]
model = User
{%- if cookiecutter.username_type == "email" %}
field_classes = {"email": EmailField}
@@ -23,7 +23,7 @@ class UserAdminCreationForm(admin_forms.UserCreationForm):
To change user signup, see UserSignupForm and UserSocialSignupForm.
"""
- class Meta(admin_forms.UserCreationForm.Meta):
+ class Meta(admin_forms.UserCreationForm.Meta): # type: ignore[name-defined]
model = User
{%- if cookiecutter.username_type == "email" %}
fields = ("email",)
diff --git a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_admin.py b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_admin.py
index f802b8ba1..66555c4ea 100644
--- a/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_admin.py
+++ b/{{cookiecutter.project_slug}}/{{cookiecutter.project_slug}}/users/tests/test_admin.py
@@ -62,7 +62,7 @@ class TestUserAdmin:
# Reload the admin module to apply the setting change
import {{ cookiecutter.project_slug }}.users.admin as users_admin
- with contextlib.suppress(admin.sites.AlreadyRegistered):
+ with contextlib.suppress(admin.sites.AlreadyRegistered): # type: ignore[attr-defined]
reload(users_admin)
@pytest.mark.django_db()