Mirror of https://github.com/cookiecutter/cookiecutter-django.git (synced 2025-08-07 21:44:52 +03:00)

Merge branch 'master' into pyup-update-rcssmin-1.1.1-to-1.1.2

This commit is contained in: commit 370eebc56d
.github/FUNDING.yml (vendored) — 2 changes

@@ -1,5 +1,5 @@
# These are supported funding model platforms

-github: [pydanny, browniebroke]
+github: [pydanny, browniebroke, luzfcb]
patreon: feldroy
open_collective: cookiecutter-django
.github/contributors.json (vendored) — 127 changes

@@ -1115,7 +1115,7 @@
"twitter_username": "Qoyyuum"
},
{
-  "name": "mfosterw",
+  "name": "Matthew Foster Walsh",
"github_login": "mfosterw",
"twitter_username": ""
},

@@ -1473,5 +1473,130 @@
After the existing entry {"name": "Jakub Boukal", "github_login": "SukiCZ", "twitter_username": ""}, the following entries are appended, one object per new contributor:
+ {"name": "Christian Jauvin", "github_login": "cjauvin", "twitter_username": ""}
+ {"name": "Plurific", "github_login": "paulschwenn", "twitter_username": ""}
+ {"name": "GitBib", "github_login": "GitBib", "twitter_username": ""}
+ {"name": "Freddy", "github_login": "Hraesvelg", "twitter_username": ""}
+ {"name": "aiden", "github_login": "anyidea", "twitter_username": ""}
+ {"name": "Michael V. Battista", "github_login": "mvbattista", "twitter_username": "mvbattista"}
+ {"name": "Nix Siow", "github_login": "nixsiow", "twitter_username": "nixsiow"}
+ {"name": "Jens Kaeske", "github_login": "jkaeske", "twitter_username": ""}
+ {"name": "henningbra", "github_login": "henningbra", "twitter_username": ""}
+ {"name": "Paul Wulff", "github_login": "mtmpaulwulff", "twitter_username": ""}
+ {"name": "Mounir", "github_login": "mounirmesselmeni", "twitter_username": ""}
+ {"name": "JAEGYUN JUNG", "github_login": "TGoddessana", "twitter_username": ""}
+ {"name": "Simeon Emanuilov", "github_login": "s-emanuilov", "twitter_username": "s_emanuilov"}
+ {"name": "Patrick Zhang", "github_login": "PatDuJour", "twitter_username": ""}
+ {"name": "GvS", "github_login": "GvS666", "twitter_username": ""}
+ {"name": "David Păcioianu", "github_login": "DavidPacioianu", "twitter_username": ""}
+ {"name": "farwill", "github_login": "farwill", "twitter_username": ""}
+ {"name": "quroom", "github_login": "quroom", "twitter_username": ""}
+ {"name": "Marios Frixou", "github_login": "frixou89", "twitter_username": ""}
+ {"name": "Geo Maciolek", "github_login": "GeoMaciolek", "twitter_username": ""}
+ {"name": "Nadav Peretz", "github_login": "nadavperetz", "twitter_username": ""}
+ {"name": "Param Kapur", "github_login": "paramkpr", "twitter_username": "ParamKapur"}
+ {"name": "Jason Mok", "github_login": "jasonmokk", "twitter_username": ""}
+ {"name": "Manas Mallick", "github_login": "ManDun", "twitter_username": ""}
+ {"name": "Alexandr Artemyev", "github_login": "Mogost", "twitter_username": "MOGOST"}
The closing "]" of the JSON array follows.
.github/dependabot.yml (vendored) — 18 changes

@@ -3,13 +3,29 @@
version: 2
updates:
  # Update Python deps for the template (not the generated project)
  - package-ecosystem: "pip"
    directory: "/"
    schedule:
      interval: "daily"
    labels:
      - "project infrastructure"

  # Update Python deps for the documentation
  - package-ecosystem: "pip"
    directory: "docs/"
    schedule:
      interval: "daily"
    labels:
      - "project infrastructure"

  # Update GitHub actions in workflows
  - package-ecosystem: "github-actions"
    directory: "/"
    schedule:
      interval: "daily"
    labels:
      - "update"
      - "project infrastructure"

  # Update npm packages
  - package-ecosystem: "npm"
.github/workflows/ci.yml (vendored) — 16 changes

@@ -23,9 +23,9 @@ jobs:
    runs-on: ${{ matrix.os }}
    steps:
      - uses: actions/checkout@v4
-     - uses: actions/setup-python@v4
+     - uses: actions/setup-python@v5
        with:
-         python-version: "3.11"
+         python-version: "3.12"
          cache: pip
      - name: Install dependencies
        run: pip install -r requirements.txt

@@ -54,9 +54,9 @@ jobs:
    steps:
      - uses: actions/checkout@v4
-     - uses: actions/setup-python@v4
+     - uses: actions/setup-python@v5
        with:
-         python-version: "3.11"
+         python-version: "3.12"
          cache: pip
      - name: Install dependencies
        run: pip install -r requirements.txt

@@ -98,9 +98,9 @@ jobs:
    steps:
      - uses: actions/checkout@v4
-     - uses: actions/setup-python@v4
+     - uses: actions/setup-python@v5
        with:
-         python-version: "3.11"
+         python-version: "3.12"
          cache: pip
          cache-dependency-path: |
            requirements.txt

@@ -108,8 +108,8 @@ jobs:
            {{cookiecutter.project_slug}}/requirements/local.txt
      - name: Install dependencies
        run: pip install -r requirements.txt
-     - uses: actions/setup-node@v3
+     - uses: actions/setup-node@v4
        with:
-         node-version: "18"
+         node-version: "20"
      - name: Bare Metal ${{ matrix.script.name }}
        run: sh tests/test_bare.sh ${{ matrix.script.args }}
.github/workflows/django-issue-checker.yml (vendored) — 4 changes

@@ -17,9 +17,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
-     - uses: actions/setup-python@v4
+     - uses: actions/setup-python@v5
        with:
-         python-version: "3.11"
+         python-version: "3.12"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
.github/workflows/issue-manager.yml (vendored) — 9 changes

@@ -23,18 +23,25 @@ jobs:
    runs-on: ubuntu-latest
    steps:
-     - uses: tiangolo/issue-manager@0.4.0
+     - uses: tiangolo/issue-manager@0.5.0
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          config: >
            {
              "answered": {
                "delay": 864000,
                "message": "Assuming the question was answered, this will be automatically closed now."
              },
              "solved": {
                "delay": 864000,
                "message": "Assuming the original issue was solved, it will be automatically closed now."
              },
              "waiting": {
                "delay": 864000,
                "message": "Automatically closing after waiting for additional info. To re-open, please provide the additional information requested."
              },
              "wontfix": {
                "delay": 864000,
                "message": "As discussed, we won't be implementing this. Automatically closing."
              }
            }
.github/workflows/pre-commit-autoupdate.yml (vendored) — 6 changes

@@ -22,9 +22,9 @@ jobs:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v4
-     - uses: actions/setup-python@v4
+     - uses: actions/setup-python@v5
        with:
-         python-version: "3.11"
+         python-version: "3.12"

      - name: Install pre-commit
        run: pip install pre-commit

@@ -37,7 +37,7 @@ jobs:
        run: pre-commit autoupdate

      - name: Create Pull Request
-       uses: peter-evans/create-pull-request@v5
+       uses: peter-evans/create-pull-request@v6
        with:
          token: ${{ secrets.GITHUB_TOKEN }}
          branch: update/pre-commit-autoupdate
.github/workflows/update-changelog.yml (vendored) — 6 changes

@@ -8,7 +8,7 @@ on:
  workflow_dispatch:

jobs:
- release:
+ update:
    # Disables this workflow from running in a repository that is not part of the indicated organization/user
    if: github.repository_owner == 'cookiecutter'

@@ -17,9 +17,9 @@ jobs:
      - uses: actions/checkout@v4

      - name: Set up Python
-       uses: actions/setup-python@v4
+       uses: actions/setup-python@v5
        with:
-         python-version: "3.11"
+         python-version: "3.12"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
.github/workflows/update-contributors.yml (vendored) — 6 changes

@@ -20,9 +20,9 @@ jobs:
      - uses: actions/checkout@v4

      - name: Set up Python
-       uses: actions/setup-python@v4
+       uses: actions/setup-python@v5
        with:
-         python-version: "3.11"
+         python-version: "3.12"
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip

@@ -33,7 +33,7 @@ jobs:
          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}

      - name: Commit changes
-       uses: stefanzweifel/git-auto-commit-action@v4.16.0
+       uses: stefanzweifel/git-auto-commit-action@v5.0.1
        with:
          commit_message: Update Contributors
          file_pattern: CONTRIBUTORS.md .github/contributors.json
@@ -1,9 +1,12 @@
exclude: "{{cookiecutter.project_slug}}|.github/contributors.json|CHANGELOG.md|CONTRIBUTORS.md"
default_stages: [commit]

+default_language_version:
+  python: python3.12
+
repos:
  - repo: https://github.com/pre-commit/pre-commit-hooks
-   rev: v4.4.0
+   rev: v4.6.0
    hooks:
      - id: trailing-whitespace
      - id: end-of-file-fixer

@@ -17,30 +20,30 @@ repos:
      - id: detect-private-key

  - repo: https://github.com/pre-commit/mirrors-prettier
-   rev: "v3.0.3"
+   rev: "v4.0.0-alpha.8"
    hooks:
      - id: prettier
        args: ["--tab-width", "2"]

  - repo: https://github.com/asottile/pyupgrade
-   rev: v3.14.0
+   rev: v3.16.0
    hooks:
      - id: pyupgrade
-       args: [--py311-plus]
+       args: [--py312-plus]
        exclude: hooks/

  - repo: https://github.com/psf/black
-   rev: 23.9.1
+   rev: 24.4.2
    hooks:
      - id: black

  - repo: https://github.com/PyCQA/isort
-   rev: 5.12.0
+   rev: 5.13.2
    hooks:
      - id: isort

  - repo: https://github.com/PyCQA/flake8
-   rev: 6.1.0
+   rev: 7.0.0
    hooks:
      - id: flake8
@@ -14,8 +14,6 @@ pin: True
label_prs: update

requirements:
  - "requirements.txt"
  - "docs/requirements.txt"
  - "{{cookiecutter.project_slug}}/requirements/base.txt"
  - "{{cookiecutter.project_slug}}/requirements/local.txt"
  - "{{cookiecutter.project_slug}}/requirements/production.txt"
@@ -8,7 +8,7 @@ version: 2
build:
  os: ubuntu-22.04
  tools:
-   python: "3.11"
+   python: "3.12"

# Build documentation in the docs/ directory with Sphinx
sphinx:
CHANGELOG.md — 1390 changes (file diff suppressed because it is too large)
@@ -18,20 +18,20 @@ This last step is very important, don't start developing from master, it'll caus…
## Testing

-You'll need to run the tests using Python 3.11. We recommend using [tox](https://tox.readthedocs.io/en/latest/) to run the tests. It will automatically create a fresh virtual environment and install our test dependencies, such as [pytest-cookies](https://pypi.python.org/pypi/pytest-cookies/) and [flake8](https://pypi.python.org/pypi/flake8/).
+You'll need to run the tests using Python 3.12. We recommend using [tox](https://tox.readthedocs.io/en/latest/) to run the tests. It will automatically create a fresh virtual environment and install our test dependencies, such as [pytest-cookies](https://pypi.python.org/pypi/pytest-cookies/) and [flake8](https://pypi.python.org/pypi/flake8/).

We'll also run the tests on GitHub actions when you send your pull request, but it's a good idea to run them locally before you send it.

### Installation

-First, make sure that your version of Python is 3.11:
+First, make sure that your version of Python is 3.12:

```bash
$ python --version
-Python 3.11.3
+Python 3.12.2
```

-Any version that starts with 3.11 will do. If you need to install it, you can get it from [python.org](https://www.python.org/downloads/).
+Any version that starts with 3.12 will do. If you need to install it, you can get it from [python.org](https://www.python.org/downloads/).

Then install `tox`, if not already installed:

@@ -66,13 +66,13 @@ $ source venv/bin/activate
These tests are slower and can be run with or without Docker:

-- Without Docker: `scripts/test_bare.sh` (for bare metal)
-- With Docker: `scripts/test_docker.sh`
+- Without Docker: `tests/test_bare.sh` (for bare metal)
+- With Docker: `tests/test_docker.sh`

All arguments to these scripts will be passed to the `cookiecutter` CLI, letting you set options, for example:

```bash
-$ scripts/test_bare.sh use_celery=y
+$ tests/test_bare.sh use_celery=y
```

## Submitting a pull request
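A short sketch of the test workflow described above, assuming Python 3.12 and `tox` are already available; the exact tox environment names live in the project's `tox.ini` and are not shown in this diff:

```bash
# Install tox into the active interpreter
pip install tox

# Run the default environments defined in tox.ini (unit tests, linting)
tox

# Or pass cookiecutter options straight to the slower integration script
tests/test_bare.sh use_celery=y
```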
CONTRIBUTORS.md — 189 changes

New rows in the contributors table, one ``<tr>`` per contributor (name, GitHub link, Twitter):

@@ -194,6 +194,13 @@ Listed in alphabetical order. (between the "scaramagus" row and "Alberto Sanchez")
+ <tr><td>aiden</td><td><a href="https://github.com/anyidea">anyidea</a></td><td></td></tr>

@@ -208,6 +215,13 @@ (before "Alvaro [Andor]")
+ <tr><td>Alexandr Artemyev</td><td><a href="https://github.com/Mogost">Mogost</a></td><td>MOGOST</td></tr>

@@ -509,6 +523,13 @@ (before "Christopher Clarke")
+ <tr><td>Christian Jauvin</td><td><a href="https://github.com/cjauvin">cjauvin</a></td><td></td></tr>

@@ -642,6 +663,13 @@ (between the "DavidDiazPinto" row and "Davit Tovmasyan")
+ <tr><td>David Păcioianu</td><td><a href="https://github.com/DavidPacioianu">DavidPacioianu</a></td><td></td></tr>

@@ -796,6 +824,13 @@ (between the "fabaff" row and "Fateme Fouladkar")
+ <tr><td>farwill</td><td><a href="https://github.com/farwill">farwill</a></td><td></td></tr>

@@ -824,6 +859,13 @@ (before "Fuzzwah")
+ <tr><td>Freddy</td><td><a href="https://github.com/Hraesvelg">Hraesvelg</a></td><td></td></tr>

@@ -852,6 +894,13 @@ (before "ghazi-git")
+ <tr><td>Geo Maciolek</td><td><a href="https://github.com/GeoMaciolek">GeoMaciolek</a></td><td></td></tr>

@@ -866,6 +915,13 @@ (before "Glenn Wiskur")
+ <tr><td>GitBib</td><td><a href="https://github.com/GitBib">GitBib</a></td><td></td></tr>

@@ -887,6 +943,13 @@ (before "Hamish Durkin")
+ <tr><td>GvS</td><td><a href="https://github.com/GvS666">GvS666</a></td><td></td></tr>

@@ -936,6 +999,13 @@ (before "Henrique G. G. Pereira")
+ <tr><td>henningbra</td><td><a href="https://github.com/henningbra">henningbra</a></td><td></td></tr>

@@ -1013,6 +1083,13 @@ (before "Jakub Boukal")
+ <tr><td>JAEGYUN JUNG</td><td><a href="https://github.com/TGoddessana">TGoddessana</a></td><td></td></tr>

@@ -1048,6 +1125,20 @@ (before "Jens Nilsson")
+ <tr><td>Jason Mok</td><td><a href="https://github.com/jasonmokk">jasonmokk</a></td><td></td></tr>
+ <tr><td>Jens Kaeske</td><td><a href="https://github.com/jkaeske">jkaeske</a></td><td></td></tr>

@@ -1321,6 +1412,13 @@ (between the "flyudvik" row and "Manjit Pardeshi")
+ <tr><td>Manas Mallick</td><td><a href="https://github.com/ManDun">ManDun</a></td><td></td></tr>

@@ -1335,6 +1433,13 @@ (between the "marciomazza" row and "Martin Blech")
+ <tr><td>Marios Frixou</td><td><a href="https://github.com/frixou89">frixou89</a></td><td></td></tr>

@@ -1412,6 +1517,13 @@ (before "Matthew Sisley")
+ <tr><td>Matthew Foster Walsh</td><td><a href="https://github.com/mfosterw">mfosterw</a></td><td></td></tr>

@@ -1447,13 +1559,6 @@ (the standalone "mfosterw" row, before "Michael Gecht", is removed)
- <tr><td>mfosterw</td><td><a href="https://github.com/mfosterw">mfosterw</a></td><td></td></tr>

@@ -1468,6 +1573,13 @@ (before "Mike97M")
+ <tr><td>Michael V. Battista</td><td><a href="https://github.com/mvbattista">mvbattista</a></td><td>mvbattista</td></tr>

@@ -1503,6 +1615,13 @@ (before "mozillazg")
+ <tr><td>Mounir</td><td><a href="https://github.com/mounirmesselmeni">mounirmesselmeni</a></td><td></td></tr>

@@ -1524,6 +1643,13 @@ (before "Naveen")
+ <tr><td>Nadav Peretz</td><td><a href="https://github.com/nadavperetz">nadavperetz</a></td><td></td></tr>

@@ -1545,6 +1671,13 @@ (before "Noah H")
+ <tr><td>Nix Siow</td><td><a href="https://github.com/nixsiow">nixsiow</a></td><td>nixsiow</td></tr>

@@ -1580,6 +1713,13 @@ (between the "pamelafox" row and "Parbhat Puri")
+ <tr><td>Param Kapur</td><td><a href="https://github.com/paramkpr">paramkpr</a></td><td>ParamKapur</td></tr>

@@ -1594,6 +1734,20 @@ (before "Pawan Chaurasia")
+ <tr><td>Patrick Zhang</td><td><a href="https://github.com/PatDuJour">PatDuJour</a></td><td></td></tr>
+ <tr><td>Paul Wulff</td><td><a href="https://github.com/mtmpaulwulff">mtmpaulwulff</a></td><td></td></tr>

@@ -1643,6 +1797,20 @@ (before "Raony Guimarães Corrêa")
+ <tr><td>Plurific</td><td><a href="https://github.com/paulschwenn">paulschwenn</a></td><td></td></tr>
+ <tr><td>quroom</td><td><a href="https://github.com/quroom">quroom</a></td><td></td></tr>

@@ -1762,6 +1930,13 @@ (between the "shywn_mrk" row and "Simon Rey")
+ <tr><td>Simeon Emanuilov</td><td><a href="https://github.com/s-emanuilov">s-emanuilov</a></td><td>s_emanuilov</td></tr>
README.md — 29 changes

@@ -6,7 +6,7 @@
[](https://github.com/ambv/black)
[](https://pyup.io/repos/github/cookiecutter/cookiecutter-django/)
-[](https://discord.gg/uFXweDQc5a)
+[](https://discord.gg/rAWFUP47d2)
[](https://www.codetriage.com/cookiecutter/cookiecutter-django)

Powered by [Cookiecutter](https://github.com/cookiecutter/cookiecutter), Cookiecutter Django is a framework for jumpstarting

@@ -20,7 +20,7 @@ production-ready Django projects quickly.
## Features

- For Django 4.2
-- Works with Python 3.11
+- Works with Python 3.12
- Renders Django projects with 100% starting test coverage
- Twitter [Bootstrap](https://github.com/twbs/bootstrap) v5
- [12-Factor](https://12factor.net) based settings via [django-environ](https://github.com/joke2k/django-environ)

@@ -51,7 +51,7 @@
## Constraints

- Only maintained 3rd party libraries are used.
-- Uses PostgreSQL everywhere: 10 - 15 ([MySQL fork](https://github.com/mabdullahadeel/cookiecutter-django-mysql) also available).
+- Uses PostgreSQL everywhere: 12 - 16 ([MySQL fork](https://github.com/mabdullahadeel/cookiecutter-django-mysql) also available).
- Environment variables for configuration (This won't work with Apache/mod_wsgi).

## Support this Project!

@@ -65,19 +65,15 @@ This project is an open source project run by volunteers. You can sponsor us via…
Projects that provide financial support to the maintainers:

----
### Two Scoops of Django

-<p align="center">
-  <a href="https://www.feldroy.com/products//two-scoops-of-django-3-x"><img src="https://cdn.shopify.com/s/files/1/0304/6901/products/Two-Scoops-of-Django-3-Alpha-Cover_540x_26507b15-e489-470b-8a97-02773dd498d1_1080x.jpg"></a>
-</p>
+[](https://www.feldroy.com/two-scoops-press#two-scoops-of-django)

Two Scoops of Django 3.x is the best ice cream-themed Django reference in the universe!

### PyUp

-<p align="center">
-  <a href="https://pyup.io/"><img src="https://pyup.io/static/images/logo.png"></a>
-</p>
+[](https://pyup.io)

PyUp brings you automated security and dependency updates used by Google and other organizations. Free for open source projects!

@@ -133,12 +129,11 @@ Answer the prompts with your own desired [options](http://cookiecutter-django.re…
    Choose from 1, 2, 3 [1]: 1
    use_docker [n]: n
    Select postgresql_version:
-    1 - 15
-    2 - 14
-    3 - 13
-    4 - 12
-    5 - 11
-    6 - 10
+    1 - 16
+    2 - 15
+    3 - 14
+    4 - 13
+    5 - 12
    Choose from 1, 2, 3, 4, 5 [1]: 1
    Select cloud_provider:
    1 - AWS

@@ -152,7 +147,7 @@
    4 - Mandrill
    5 - Postmark
    6 - Sendgrid
-    7 - SendinBlue
+    7 - Brevo (formerly SendinBlue)
    8 - SparkPost
    9 - Other SMTP
    Choose from 1, 2, 3, 4, 5, 6, 7, 8, 9 [1]: 1
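As a usage sketch of the prompt session shown above — assuming `cookiecutter` is installed from PyPI and the template is fetched straight from GitHub (the README's canonical invocation may differ slightly):

```bash
pip install "cookiecutter>=1.7.0"
cookiecutter https://github.com/cookiecutter/cookiecutter-django
# ...then answer the interactive prompts (postgresql_version, cloud_provider, mail_service, ...)
```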
@@ -18,7 +18,7 @@
"windows": "n",
"editor": ["None", "PyCharm", "VS Code"],
"use_docker": "n",
-"postgresql_version": ["15", "14", "13", "12", "11", "10"],
+"postgresql_version": ["16", "15", "14", "13", "12"],
"cloud_provider": ["AWS", "GCP", "Azure", "None"],
"mail_service": [
  "Mailgun",

@@ -27,7 +27,7 @@
  "Mandrill",
  "Postmark",
  "Sendgrid",
-  "SendinBlue",
+  "Brevo",
  "SparkPost",
  "Other SMTP"
],
@@ -46,7 +46,7 @@ Run these commands to deploy the project to Heroku:
    # Assign with AWS_STORAGE_BUCKET_NAME
    heroku config:set DJANGO_AWS_STORAGE_BUCKET_NAME=

-   git push heroku master
+   git push heroku main

    heroku run python manage.py createsuperuser
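A hedged sketch of the steps that usually surround that push — `migrate` and Django's deployment checklist are standard management commands, though this excerpt only shows `createsuperuser`:

```bash
git push heroku main
heroku run python manage.py migrate        # apply database migrations on the dyno
heroku run python manage.py createsuperuser
heroku run python manage.py check --deploy # Django's production readiness checks
```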
@@ -37,7 +37,7 @@ Make sure your project is fully committed and pushed up to Bitbucket or Github o…
    mkvirtualenv --python=/usr/bin/python3.10 my-project-name
    pip install -r requirements/production.txt  # may take a few minutes

-.. note:: We're creating the virtualenv using Python 3.10 (``--python=/usr/bin/python3.10``), although Cookiecutter Django generates a project for Python 3.11. This is because, at time of writing, PythonAnywhere only supports Python 3.10. It shouldn't be a problem, but if it is, you may try changing the Python version to 3.11 and see if it works. If it does, please let us know, or even better, submit a pull request to update this section.
+.. note:: We're creating the virtualenv using Python 3.10 (``--python=/usr/bin/python3.10``), although Cookiecutter Django generates a project for Python 3.12. This is because, at time of writing, PythonAnywhere only supports Python 3.10. It shouldn't be a problem, but if it is, you may try changing the Python version to 3.12 and see if it works. If it does, please let us know, or even better, submit a pull request to update this section.

Setting environment variables in the console
--------------------------------------------
@@ -14,7 +14,7 @@ Prerequisites
Understanding the Docker Compose Setup
--------------------------------------

-Before you begin, check out the ``production.yml`` file in the root of this project. Keep note of how it provides configuration for the following services:
+Before you begin, check out the ``docker-compose.production.yml`` file in the root of this project. Keep note of how it provides configuration for the following services:

* ``django``: your application running behind ``Gunicorn``;
* ``postgres``: PostgreSQL database with the application's relational data;

@@ -107,7 +107,7 @@ To solve this, you can either:
2. create a ``.env`` file in the root of the project with just variables you need. You'll need to also define them in ``.envs/.production/.django`` (hence duplicating them).
3. set these variables when running the build command::

-     DJANGO_AWS_S3_CUSTOM_DOMAIN=example.com docker compose -f production.yml build``.
+     DJANGO_AWS_S3_CUSTOM_DOMAIN=example.com docker compose -f docker-compose.production.yml build``.

None of these options are ideal, we're open to suggestions on how to improve this. If you think you have one, please open an issue or a pull request.

@@ -122,42 +122,42 @@ Building & Running Production Stack
You will need to build the stack first. To do that, run::

-    docker compose -f production.yml build
+    docker compose -f docker-compose.production.yml build

Once this is ready, you can run it with::

-    docker compose -f production.yml up
+    docker compose -f docker-compose.production.yml up

To run the stack and detach the containers, run::

-    docker compose -f production.yml up -d
+    docker compose -f docker-compose.production.yml up -d

To run a migration, open up a second terminal and run::

-    docker compose -f production.yml run --rm django python manage.py migrate
+    docker compose -f docker-compose.production.yml run --rm django python manage.py migrate

To create a superuser, run::

-    docker compose -f production.yml run --rm django python manage.py createsuperuser
+    docker compose -f docker-compose.production.yml run --rm django python manage.py createsuperuser

If you need a shell, run::

-    docker compose -f production.yml run --rm django python manage.py shell
+    docker compose -f docker-compose.production.yml run --rm django python manage.py shell

To check the logs out, run::

-    docker compose -f production.yml logs
+    docker compose -f docker-compose.production.yml logs

If you want to scale your application, run::

-    docker compose -f production.yml up --scale django=4
-    docker compose -f production.yml up --scale celeryworker=2
+    docker compose -f docker-compose.production.yml up --scale django=4
+    docker compose -f docker-compose.production.yml up --scale celeryworker=2

.. warning:: don't try to scale ``postgres``, ``celerybeat``, or ``traefik``.

To see how your containers are doing run::

-    docker compose -f production.yml ps
+    docker compose -f docker-compose.production.yml ps

Example: Supervisor

@@ -165,12 +165,12 @@ Example: Supervisor
Once you are ready with your initial setup, you want to make sure that your application is run by a process manager to
survive reboots and auto restarts in case of an error. You can use the process manager you are most familiar with. All
-it needs to do is to run ``docker compose -f production.yml up`` in your projects root directory.
+it needs to do is to run ``docker compose -f docker-compose.production.yml up`` in your projects root directory.

If you are using ``supervisor``, you can use this file as a starting point::

    [program:{{cookiecutter.project_slug}}]
-   command=docker compose -f production.yml up
+   command=docker compose -f docker-compose.production.yml up
    directory=/path/to/{{cookiecutter.project_slug}}
    redirect_stderr=true
    autostart=true
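When debugging a single service it is often handier to follow just its logs; a small sketch using standard ``docker compose logs`` flags (the service names come from the compose file described above):

```bash
# Follow only the django service, starting from the last 100 lines
docker compose -f docker-compose.production.yml logs --tail=100 -f django

# Same idea for the Celery worker
docker compose -f docker-compose.production.yml logs --tail=100 -f celeryworker
```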
@@ -32,9 +32,9 @@ Build the Stack
This can take a while, especially the first time you run this particular command on your development system::

-    $ docker compose -f local.yml build
+    $ docker compose -f docker-compose.local.yml build

-Generally, if you want to emulate production environment use ``production.yml`` instead. And this is true for any other actions you might need to perform: whenever a switch is required, just do it!
+Generally, if you want to emulate production environment use ``docker-compose.production.yml`` instead. And this is true for any other actions you might need to perform: whenever a switch is required, just do it!

Before doing any git commit, `pre-commit`_ should be installed globally on your local machine, and then::

@@ -51,11 +51,11 @@ This brings up both Django and PostgreSQL. The first time it is run it might tak…
Open a terminal at the project root and run the following for local development::

-    $ docker compose -f local.yml up
+    $ docker compose -f docker-compose.local.yml up

-You can also set the environment variable ``COMPOSE_FILE`` pointing to ``local.yml`` like this::
+You can also set the environment variable ``COMPOSE_FILE`` pointing to ``docker-compose.local.yml`` like this::

-    $ export COMPOSE_FILE=local.yml
+    $ export COMPOSE_FILE=docker-compose.local.yml

And then run::

@@ -65,16 +65,23 @@ To run in a detached (background) mode, just::
    $ docker compose up -d

+These commands don't run the docs service. In order to run docs service you can run::
+
+    $ docker compose -f docker-compose.docs.yml up
+
+To run the docs with local services just use::
+
+    $ docker compose -f docker-compose.local.yml -f docker-compose.docs.yml up
+
The site should start and be accessible at http://localhost:3000 if you selected Webpack or Gulp as frontend pipeline and http://localhost:8000 otherwise.

Execute Management Commands
---------------------------

-As with any shell command that we wish to run in our container, this is done using the ``docker compose -f local.yml run --rm`` command: ::
+As with any shell command that we wish to run in our container, this is done using the ``docker compose -f docker-compose.local.yml run --rm`` command: ::

-    $ docker compose -f local.yml run --rm django python manage.py migrate
-    $ docker compose -f local.yml run --rm django python manage.py createsuperuser
+    $ docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
+    $ docker compose -f docker-compose.local.yml run --rm django python manage.py createsuperuser

Here, ``django`` is the target service we are executing the commands against.
Also, please note that the ``docker exec`` does not work for running management commands.

@@ -90,7 +97,7 @@ When ``DEBUG`` is set to ``True``, the host is validated against ``['localhost',…
Configuring the Environment
---------------------------

-This is the excerpt from your project's ``local.yml``: ::
+This is the excerpt from your project's ``docker-compose.local.yml``: ::

    # ...

@@ -156,8 +163,8 @@ You have to modify the relevant requirement file: base, local or production by a…
To get this change picked up, you'll need to rebuild the image(s) and restart the running container: ::

-    docker compose -f local.yml build
-    docker compose -f local.yml up
+    docker compose -f docker-compose.local.yml build
+    docker compose -f docker-compose.local.yml up

Debugging
~~~~~~~~~

@@ -171,7 +178,7 @@ If you are using the following within your code to debug: ::
Then you may need to run the following for it to work as desired: ::

-    $ docker compose -f local.yml run --rm --service-ports django
+    $ docker compose -f docker-compose.local.yml run --rm --service-ports django

django-debug-toolbar

@@ -224,7 +231,7 @@ Prerequisites:
* ``use_docker`` was set to ``y`` on project initialization;
* ``use_celery`` was set to ``y`` on project initialization.

-By default, it's enabled both in local and production environments (``local.yml`` and ``production.yml`` Docker Compose configs, respectively) through a ``flower`` service. For added security, ``flower`` requires its clients to provide authentication credentials specified as the corresponding environments' ``.envs/.local/.django`` and ``.envs/.production/.django`` ``CELERY_FLOWER_USER`` and ``CELERY_FLOWER_PASSWORD`` environment variables. Check out ``localhost:5555`` and see for yourself.
+By default, it's enabled both in local and production environments (``docker-compose.local.yml`` and ``docker-compose.production.yml`` Docker Compose configs, respectively) through a ``flower`` service. For added security, ``flower`` requires its clients to provide authentication credentials specified as the corresponding environments' ``.envs/.local/.django`` and ``.envs/.production/.django`` ``CELERY_FLOWER_USER`` and ``CELERY_FLOWER_PASSWORD`` environment variables. Check out ``localhost:5555`` and see for yourself.

.. _`Flower`: https://github.com/mher/flower

@@ -272,7 +279,7 @@ certs
Take the certificates that you generated and place them in a folder called ``certs`` in the project's root folder. Assuming that you registered your local hostname as ``my-dev-env.local``, the certificates you will put in the folder should have the names ``my-dev-env.local.crt`` and ``my-dev-env.local.key``.

-local.yml
+docker-compose.local.yml
~~~~~~~~~

#. Add the ``nginx-proxy`` service. ::

@@ -316,7 +323,7 @@ You should allow the new hostname. ::
Rebuild your ``docker`` application. ::

-    $ docker compose -f local.yml up -d --build
+    $ docker compose -f docker-compose.local.yml up -d --build

Go to your browser and type in your URL bar ``https://my-dev-env.local``

@@ -336,9 +343,9 @@ Webpack
If you are using Webpack:

-1. On the ``nginx-proxy`` service in ``local.yml``, change ``depends_on`` to ``node`` instead of ``django``.
+1. On the ``nginx-proxy`` service in ``docker-compose.local.yml``, change ``depends_on`` to ``node`` instead of ``django``.

-2. On the ``node`` service in ``local.yml``, add the following environment configuration:
+2. On the ``node`` service in ``docker-compose.local.yml``, add the following environment configuration:

::
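Pulling the commands above together, a typical first local session might look like the following sketch (assuming the project was generated with ``use_docker=y`` and the new compose file names):

```bash
# Point docker compose at the local stack once, instead of repeating -f
export COMPOSE_FILE=docker-compose.local.yml

docker compose build                                       # build the images
docker compose run --rm django python manage.py migrate   # apply migrations
docker compose run --rm django python manage.py createsuperuser
docker compose up                                          # then open http://localhost:8000
```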
@@ -9,7 +9,7 @@ Setting Up Development Environment
Make sure to have the following on your host:

-* Python 3.11
+* Python 3.12
* PostgreSQL_.
* Redis_, if using Celery
* Cookiecutter_

@@ -18,7 +18,7 @@ First things first.
#. Create a virtualenv: ::

-    $ python3.11 -m venv <virtual env path>
+    $ python3.12 -m venv <virtual env path>

#. Activate the virtualenv you have just created: ::

@@ -96,6 +96,61 @@ First things first.
.. _direnv: https://direnv.net/

(section added)
Creating Your First Django App
-------------------------------

After setting up your environment, you're ready to add your first app. This project uses the setup from "Two Scoops of Django" with a two-tier layout:

- **Top Level Repository Root** has config files, documentation, `manage.py`, and more.
- **Second Level Django Project Root** is where your Django apps live.
- **Second Level Configuration Root** holds settings and URL configurations.

The project layout looks something like this: ::

    <repository_root>/
    ├── config/
    │   ├── settings/
    │   │   ├── __init__.py
    │   │   ├── base.py
    │   │   ├── local.py
    │   │   └── production.py
    │   ├── urls.py
    │   └── wsgi.py
    ├── <django_project_root>/
    │   ├── <name_of_the_app>/
    │   │   ├── migrations/
    │   │   ├── admin.py
    │   │   ├── apps.py
    │   │   ├── models.py
    │   │   ├── tests.py
    │   │   └── views.py
    │   ├── __init__.py
    │   └── ...
    ├── requirements/
    │   ├── base.txt
    │   ├── local.txt
    │   └── production.txt
    ├── manage.py
    ├── README.md
    └── ...

Following this structured approach, here's how to add a new app:

#. **Create the app** using Django's ``startapp`` command, replacing ``<name-of-the-app>`` with your desired app name: ::

    $ python manage.py startapp <name-of-the-app>

#. **Move the app** to the Django Project Root, maintaining the project's two-tier structure: ::

    $ mv <name-of-the-app> <django_project_root>/

#. **Edit the app's apps.py**, changing ``name = '<name-of-the-app>'`` to ``name = '<django_project_root>.<name-of-the-app>'``.

#. **Register the new app** by adding it to the ``LOCAL_APPS`` list in ``config/settings/base.py``, integrating it as an official component of your project.

Setup Email Backend
-------------------
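As a concrete, purely illustrative example of those four steps — the app name ``blog`` and the project root ``my_awesome_project`` are hypothetical placeholders, not part of the template:

```bash
# 1. Create the app, then 2. move it under the Django project root
python manage.py startapp blog
mv blog my_awesome_project/

# 3. In my_awesome_project/blog/apps.py set:   name = "my_awesome_project.blog"
# 4. Add "my_awesome_project.blog" to LOCAL_APPS in config/settings/base.py
```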
@@ -1,14 +1,14 @@
PostgreSQL Backups with Docker
==============================

-.. note:: For brevity it is assumed that you will be running the below commands against local environment, however, this is by no means mandatory so feel free to switch to ``production.yml`` when needed.
+.. note:: For brevity it is assumed that you will be running the below commands against local environment, however, this is by no means mandatory so feel free to switch to ``docker-compose.production.yml`` when needed.

Prerequisites
-------------

#. the project was generated with ``use_docker`` set to ``y``;
-#. the stack is up and running: ``docker compose -f local.yml up -d postgres``.
+#. the stack is up and running: ``docker compose -f docker-compose.local.yml up -d postgres``.

Creating a Backup

@@ -16,7 +16,7 @@ Creating a Backup
To create a backup, run::

-    $ docker compose -f local.yml exec postgres backup
+    $ docker compose -f docker-compose.local.yml exec postgres backup

Assuming your project's database is named ``my_project`` here is what you will see: ::

@@ -31,7 +31,7 @@ Viewing the Existing Backups
To list existing backups, ::

-    $ docker compose -f local.yml exec postgres backups
+    $ docker compose -f docker-compose.local.yml exec postgres backups

These are the sample contents of ``/backups``: ::

@@ -55,9 +55,9 @@ With a single backup file copied to ``.`` that would be ::
    $ docker cp 9c5c3f055843:/backups/backup_2018_03_13T09_05_07.sql.gz .

-You can also get the container ID using ``docker compose -f local.yml ps -q postgres`` so if you want to automate your backups, you don't have to check the container ID manually every time. Here is the full command ::
+You can also get the container ID using ``docker compose -f docker-compose.local.yml ps -q postgres`` so if you want to automate your backups, you don't have to check the container ID manually every time. Here is the full command ::

-    $ docker cp $(docker compose -f local.yml ps -q postgres):/backups ./backups
+    $ docker cp $(docker compose -f docker-compose.local.yml ps -q postgres):/backups ./backups

.. _`command`: https://docs.docker.com/engine/reference/commandline/cp/

@@ -66,7 +66,7 @@ Restoring from the Existing Backup
To restore from one of the backups you have already got (take the ``backup_2018_03_13T09_05_07.sql.gz`` for example), ::

-    $ docker compose -f local.yml exec postgres restore backup_2018_03_13T09_05_07.sql.gz
+    $ docker compose -f docker-compose.local.yml exec postgres restore backup_2018_03_13T09_05_07.sql.gz

You will see something like ::

@@ -92,7 +92,36 @@ You will see something like ::
Backup to Amazon S3
----------------------------------

For uploading your backups to Amazon S3 you can use the aws cli container. There is an upload command for uploading the postgres /backups directory recursively and there is a download command for downloading a specific backup. The default S3 environment variables are used. ::

-    $ docker compose -f production.yml run --rm awscli upload
-    $ docker compose -f production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
+    $ docker compose -f docker-compose.production.yml run --rm awscli upload
+    $ docker compose -f docker-compose.production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz

(sections added)
Remove Backup
----------------------------------

To remove backup you can use the ``rmbackup`` command. This will remove the backup from the ``/backups`` directory. ::

    $ docker compose -f docker-compose.local.yml exec postgres rmbackup backup_2018_03_13T09_05_07.sql.gz

Upgrading PostgreSQL
----------------------------------

Upgrading PostgreSQL in your project requires a series of carefully executed steps. Start by halting all containers, excluding the postgres container. Following this, create a backup and proceed to remove the outdated data volume. ::

    $ docker compose -f docker-compose.local.yml down
    $ docker compose -f docker-compose.local.yml up -d postgres
    $ docker compose -f docker-compose.local.yml run --rm postgres backup
    $ docker compose -f docker-compose.local.yml down
    $ docker volume rm my_project_postgres_data

.. note:: Neglecting to remove the old data volume may lead to issues, such as the new postgres container failing to start with errors like ``FATAL: database files are incompatible with server``, and ``could not translate host name "postgres" to address: Name or service not known``.

To complete the upgrade, update the PostgreSQL version in the corresponding Dockerfile (e.g. ``compose/production/postgres/Dockerfile``) and build a new version of PostgreSQL. ::

    $ docker compose -f docker-compose.local.yml build postgres
    $ docker compose -f docker-compose.local.yml up -d postgres
    $ docker compose -f docker-compose.local.yml run --rm postgres restore backup_2018_03_13T09_05_07.sql.gz
    $ docker compose -f docker-compose.local.yml up -d
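If you want the copy step automated, a rough sketch of a daily cron entry built from the ``docker cp`` command above — the project path and schedule here are illustrative, not part of the project:

```bash
# crontab entry: every day at 03:00, copy the latest backups out of the postgres container
0 3 * * * cd /path/to/project && docker cp "$(docker compose -f docker-compose.local.yml ps -q postgres)":/backups ./backups
```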
@@ -11,7 +11,7 @@ After you have set up to `develop locally`_, run the following command from the…
If you set up your project to `develop locally with docker`_, run the following command: ::

-    $ docker compose -f local.yml up docs
+    $ docker compose -f docker-compose.docs.yml up

Navigate to port 9000 on your host to see the documentation. This will be opened automatically at `localhost`_ for local, non-docker development.
@@ -22,6 +22,6 @@ TODO
Why doesn't this follow the layout from Two Scoops of Django?
-------------------------------------------------------------

-You may notice that some elements of this project do not exactly match what we describe in chapter 3 of `Two Scoops of Django 1.11`_. The reason for that is this project, amongst other things, serves as a test bed for trying out new ideas and concepts. Sometimes they work, sometimes they don't, but the end result is that it won't necessarily match precisely what is described in the book I co-authored.
+You may notice that some elements of this project do not exactly match what we describe in chapter 3 of `Two Scoops of Django 3.x`_. The reason for that is this project, amongst other things, serves as a test bed for trying out new ideas and concepts. Sometimes they work, sometimes they don't, but the end result is that it won't necessarily match precisely what is described in the book I co-authored.

-.. _Two Scoops of Django 1.11: https://www.feldroy.com/collections/django/products/two-scoops-of-django-1-11
+.. _Two Scoops of Django 3.x: https://www.feldroy.com/two-scoops-press#two-scoops-of-django
@@ -28,6 +28,7 @@ Contents
   faq
   troubleshooting
   contributing
+  maintainer-guide

Indices and tables
------------------
@@ -4,40 +4,30 @@ Linters
.. index:: linters

-flake8
-------
-
-To run flake8: ::
-
-    $ flake8
-
-The config for flake8 is located in setup.cfg. It specifies:
-
-* Set max line length to 120 chars
-* Exclude ``.tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules``
-
-pylint
-------
-
-To run pylint: ::
-
-    $ pylint <python files that you wish to lint>
-
-The config for pylint is located in .pylintrc. It specifies:
-
-* Use the pylint_django plugin. If using Celery, also use pylint_celery.
-* Set max line length to 120 chars
-* Disable linting messages for missing docstring and invalid name
-* max-parents=13
-
-pycodestyle
------------
-
-This is included in flake8's checks, but you can also run it separately to see a more detailed report: ::
-
-    $ pycodestyle <python files that you wish to lint>
-
-The config for pycodestyle is located in setup.cfg. It specifies:
-
-* Set max line length to 120 chars
-* Exclude ``.tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules``
+ruff
+------
+
+Ruff is a Python linter and code formatter, written in Rust.
+It is an aggregation of flake8, pylint, pyupgrade and many more.
+
+Ruff comes with a linter (``ruff check``) and a formatter (``ruff format``).
+The linter is a wrapper around flake8, pylint, and other linters,
+and the formatter is a wrapper around black, isort, and other formatters.
+
+To run ruff without modifying your files: ::
+
+    $ ruff format --diff .
+    $ ruff check .
+
+Ruff is capable of fixing most of the problems it encounters.
+Be sure you commit first before running ``ruff`` so you can restore to a savepoint (and amend afterwards to prevent a double commit): ::
+
+    $ ruff format .
+    $ ruff check --fix .
+    # be careful with the --unsafe-fixes option, it can break your code
+    $ ruff check --fix --unsafe-fixes .
+
+The config for ruff is located in pyproject.toml.
+One of the most important options is ``tool.ruff.lint.select``.
+``select`` determines which linters are run. For example, `DJ <https://docs.astral.sh/ruff/rules/#flake8-django-dj>`_ refers to flake8-django.
+For a full list of available linters, see `https://docs.astral.sh/ruff/rules/ <https://docs.astral.sh/ruff/rules/>`_
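For example, the same rule codes that appear in ``select`` can be tried from the command line without touching the config file — a quick sketch (the project's actual ``pyproject.toml`` selection is not shown in this diff):

```bash
# Run only the flake8-django (DJ) rules across the project
ruff check --select DJ .

# Preview what the formatter would change, without writing any files
ruff format --diff .
```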
104
docs/maintainer-guide.md
Normal file
104
docs/maintainer-guide.md
Normal file
|
@ -0,0 +1,104 @@
|
|||
# Maintainer guide
|
||||
|
||||
This document is intended for maintainers of the template.
|
||||
|
||||
## Automated updates
|
||||
|
||||
We use 2 separate services to keep our dependencies up-to-date:
|
||||
|
||||
- Dependabot, which manages updates of Python deps of the template, GitHub actions, npm packages and Docker images.
|
||||
- PyUp, which manages the Python deps for the generated project.
|
||||
|
||||
We don't use Dependabot for the generated project deps because our requirements files are templated, and Dependabot fails to parse them. PyUp is -AFAIK- the only service out there that supports having Jinja tags in the requirements file.
|
||||
|
||||
Updates for the template should be labelled as `project infrastructure` while the ones about the generated project should be labelled as `update`. This is use to work in conjunction with our changelog script (see later).
|
||||
|
||||
## Automation scripts
|
||||
|
||||
We have a few workflows which have been automated over time. They usually run using GitHub actions and might need a few small manual actions to work nicely. Some have a few limitations which we should document here.
|
||||
|
||||
### CI
|
||||
|
||||
`ci.yml`
|
||||
|
||||
The CI workflow tries to cover 2 main aspects of the template:
|
||||
|
||||
- Check all combinations to make sure that valid files are generated with no major linting issues. Issues which are fixed by an auto-formatter after generation aren't considered major, and only aim for best effort. This is under the `test` job.
|
||||
- Run more in-depth tests on a few combinations, by installing dependencies, running type checker and the test suite of the generated project. We try to cover docker (`docker` job) and non-docker (`bare` job) setups.
|
||||
|
||||
We also run the deployment checks, but we don't do much more beyond that for testing the production setup.
|
||||
|
||||
### Django issue checker
|
||||
|
||||
`django-issue-checker.yml`
|
||||
|
||||
This workflow runs daily, on schedule, and checks if there is a new major version of Django (not in the pure SemVer sense) released that we are not running, and list our dependencies compatibility.
|
||||
|
||||
For example, at time of writing, we use Django 4.2, but the latest version of Django is 5.0, so the workflow created a ["Django 5.0" issue](https://github.com/cookiecutter/cookiecutter-django/issues/4724) in GitHub, with a compatibility table and keeps it up to date every day.
|
||||
|
||||
#### Limitations
|
||||
|
||||
Here are a few current and past limitations of the script
|
||||
|
||||
- When a new dependency is added to the template, the script fails to update an existing issue
|
||||
- Not sure what happens when a deps is removed
|
||||
- ~~Unable to parse classifiers without minor version~~
|
||||
- ~~Creates an issue even if we are on the latest version~~
|
||||
|
||||
### Issue manager
|
||||
|
||||
`issue-manager.yml`
|
||||
|
||||
A workflow that uses [Sebastian Ramirez' issue-manager](https://github.com/tiangolo/issue-manager) to help us automate issue management. The tag line from the repo explains it well:
|
||||
|
||||
> Automatically close issues or Pull Requests that have a label, after a custom delay, if no one replies back.
|
||||
|
||||
It runs on a schedule as well as when some actions are taken on issues and pull requests.
|
||||
|
||||
We wait 10 days before closing issues, and we have a few customised reasons, which are configured in the workflow itself. The config should be fairly self-explanatory.
|
||||
|
||||
### Pre-commit auto-update
|
||||
|
||||
`pre-commit-autoupdate.yml`
|
||||
|
||||
Run daily, to do `pre-commit autoupdate` on the template as well as the generated project, and opens a pull request with the changes.
|
||||
|
||||
#### Limitations
|
||||
|
||||
- The PR is open as GitHub action which means that CI does NOT run. The documentation for create-pull-request action [explains why](https://github.com/peter-evans/create-pull-request/blob/main/docs/concepts-guidelines.md#triggering-further-workflow-runs).
|
||||
- Some hooks are also installed as local dependencies (via `requirements/local.txt`), but these are updated separately via PyUP.
|
||||
|
||||
### Update changelog
|
||||
|
||||
`update-changelog.yml`
|
||||
|
||||
Run daily at 2AM to update our changelog and create a GitHub release. This runs a custom script which:
|
||||
|
||||
- List all pull requests merged the day before
|
||||
- The release name is calendar based, so `YYYY.MM.DD`
|
||||
- For each PR:
|
||||
- Get the PR title to summarize the change
|
||||
- Look at the PR labels to classify it in a section of the release notes:
|
||||
- anything labelled `project infrastructure` is excluded
|
||||
- label `update` goes in section "Updated"
|
||||
- label `bug` goes in section "Fixed"
|
||||
- label `docs` goes in section "Documentation"
|
||||
- Default to section "Changed"
|
||||
|
||||
With that in mind, when merging changes, it's a good idea to set the labels and rename the PR title to give a good summary of the change, in the context of the changelog.
#### Limitations

- Dependabot updates for npm & Docker have verbose titles; try to rename them to something more readable, e.g. `Bump webpack-dev-server from 4.15.1 to 5.0.2 in /{{cookiecutter.project_slug}}` -> `Bump webpack-dev-server to 5.0.2`
- ~~Dependency updates for the template repo (tox, cookiecutter, etc...) don't need to appear in the changelog, and need to be labelled as `project infrastructure` manually. By default, they come from PyUp labelled as `update`.~~
### Update contributors

`update-contributors.yml`

Runs on each push to the master branch. It lists the 5 most recently merged pull requests and extracts their authors. If any author is new, it updates `.github/contributors.json`, regenerates `CONTRIBUTORS.md` from it, and pushes the changes back to master.
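The author-fetching part looks roughly like the sketch below. It uses the same PyGithub calls as the script in this repo, but the constants are placeholders and the merge into `contributors.json` is omitted:

```python
# Hedged sketch of fetching recent PR authors with PyGithub.
# GITHUB_TOKEN / GITHUB_REPO are placeholders; the real workflow reads them from
# the environment and then merges any new authors into contributors.json.
import os

from github import Github

GITHUB_TOKEN = os.environ["GITHUB_TOKEN"]
GITHUB_REPO = "cookiecutter/cookiecutter-django"

repo = Github(login_or_token=GITHUB_TOKEN, per_page=5).get_repo(GITHUB_REPO)
recent_pulls = repo.get_pulls(state="closed", sort="updated", direction="desc").get_page(0)

for pull in recent_pulls:
    if pull.merged:  # only count PRs that were actually merged
        print(pull.user.login)  # GitHub username of the PR author
```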
#### Limitations

- If you merge a pull request from a new contributor and then merge another one right after, the push to master will fail because the remote will be out of date.
- If you merge more than 5 pull requests in a row like this, the new contributor might fail to be added.
|
|
@ -66,12 +66,11 @@ use_docker:
|
|||
postgresql_version:
|
||||
Select a PostgreSQL_ version to use. The choices are:
|
||||
|
||||
1. 15
|
||||
2. 14
|
||||
3. 13
|
||||
4. 12
|
||||
5. 11
|
||||
6. 10
|
||||
1. 16
|
||||
2. 15
|
||||
3. 14
|
||||
4. 13
|
||||
5. 12
|
||||
|
||||
cloud_provider:
|
||||
Select a cloud provider for static & media files. The choices are:
|
||||
|
@ -92,7 +91,7 @@ mail_service:
|
|||
4. Mandrill_
|
||||
5. Postmark_
|
||||
6. SendGrid_
|
||||
7. SendinBlue_
|
||||
7. `Brevo (formerly SendinBlue)`_
|
||||
8. SparkPost_
|
||||
9. `Other SMTP`_
|
||||
|
||||
|
@ -175,7 +174,7 @@ debug:
|
|||
.. _Mandrill: http://mandrill.com
|
||||
.. _Postmark: https://postmarkapp.com
|
||||
.. _SendGrid: https://sendgrid.com
|
||||
.. _SendinBlue: https://www.sendinblue.com
|
||||
.. _Brevo (formerly SendinBlue): https://www.brevo.com
|
||||
.. _SparkPost: https://www.sparkpost.com
|
||||
.. _Other SMTP: https://anymail.readthedocs.io/en/stable/
|
||||
|
||||
|
|
|
@ -1,3 +1,3 @@
|
|||
sphinx==7.2.6
|
||||
sphinx-rtd-theme==1.3.0
|
||||
myst-parser==2.0.0
|
||||
sphinx==7.3.7
|
||||
sphinx-rtd-theme==2.0.0
|
||||
myst-parser==3.0.1
|
||||
|
|
|
@ -69,8 +69,8 @@ SENDGRID_API_KEY SENDGRID_API_KEY n/a
|
|||
SENDGRID_GENERATE_MESSAGE_ID True n/a raises error
|
||||
SENDGRID_MERGE_FIELD_FORMAT None n/a raises error
|
||||
SENDGRID_API_URL n/a n/a "https://api.sendgrid.com/v3/"
|
||||
SENDINBLUE_API_KEY SENDINBLUE_API_KEY n/a raises error
|
||||
SENDINBLUE_API_URL n/a n/a "https://api.sendinblue.com/v3/"
|
||||
BREVO_API_KEY BREVO_API_KEY n/a raises error
|
||||
BREVO_API_URL n/a n/a "https://api.brevo.com/v3/"
|
||||
SPARKPOST_API_KEY SPARKPOST_API_KEY n/a raises error
|
||||
SPARKPOST_API_URL n/a n/a "https://api.sparkpost.com/api/v1"
|
||||
======================================= =========================== ============================================== ======================================================================
|
||||
|
|
|
@ -19,7 +19,7 @@ You will get a readout of the `users` app that has already been set up with test
|
|||
|
||||
If you set up your project to `develop locally with docker`_, run the following command: ::
|
||||
|
||||
$ docker compose -f local.yml run --rm django pytest
|
||||
$ docker compose -f docker-compose.local.yml run --rm django pytest
|
||||
|
||||
Targeting particular apps for testing in ``docker`` follows a similar pattern as previously shown above.
|
||||
|
||||
|
@ -36,14 +36,14 @@ Once the tests are complete, in order to see the code coverage, run the followin
|
|||
|
||||
If you're running the project locally with Docker, use these commands instead: ::
|
||||
|
||||
$ docker compose -f local.yml run --rm django coverage run -m pytest
|
||||
$ docker compose -f local.yml run --rm django coverage report
|
||||
$ docker compose -f docker-compose.local.yml run --rm django coverage run -m pytest
|
||||
$ docker compose -f docker-compose.local.yml run --rm django coverage report
|
||||
|
||||
.. note::
|
||||
|
||||
At the root of the project folder, you will find the `pytest.ini` file. You can use this to customize_ the ``pytest`` to your liking.
|
||||
|
||||
There is also the `.coveragerc`. This is the configuration file for the ``coverage`` tool. You can find out more about `configuring`_ ``coverage``.
|
||||
The configuration for ``coverage`` can be found in ``pyproject.toml``. You can find out more about `configuring`_ ``coverage``.
|
||||
|
||||
.. seealso::
|
||||
|
||||
|
|
|
@ -30,7 +30,7 @@ If you recreate the project multiple times with the same name, Docker would pres
|
|||
|
||||
To fix this, you can either:
|
||||
|
||||
- Clear your project-related Docker cache with ``docker compose -f local.yml down --volumes --rmi all``.
|
||||
- Clear your project-related Docker cache with ``docker compose -f docker-compose.local.yml down --volumes --rmi all``.
|
||||
- Use the Docker volume sub-commands to find volumes (`ls`_) and remove them (`rm`_).
|
||||
- Use the `prune`_ command to clear system-wide (use with care!).
|
||||
|
||||
|
|
|
@ -8,6 +8,7 @@ NOTE:
|
|||
TODO: restrict Cookiecutter Django project initialization to
|
||||
Python 3.x environments only
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import json
|
||||
|
@ -77,7 +78,11 @@ def remove_docker_files():
|
|||
shutil.rmtree(".devcontainer")
|
||||
shutil.rmtree("compose")
|
||||
|
||||
file_names = ["local.yml", "production.yml", ".dockerignore"]
|
||||
file_names = [
|
||||
"docker-compose.local.yml",
|
||||
"docker-compose.production.yml",
|
||||
".dockerignore",
|
||||
]
|
||||
for file_name in file_names:
|
||||
os.remove(file_name)
|
||||
if "{{ cookiecutter.editor }}" == "PyCharm":
|
||||
|
@ -429,10 +434,6 @@ def remove_drf_starter_files():
|
|||
os.remove(os.path.join("{{cookiecutter.project_slug}}", "users", "tests", "test_swagger.py"))
|
||||
|
||||
|
||||
def remove_storages_module():
|
||||
os.remove(os.path.join("{{cookiecutter.project_slug}}", "utils", "storages.py"))
|
||||
|
||||
|
||||
def main():
|
||||
debug = "{{ cookiecutter.debug }}".lower() == "y"
|
||||
|
||||
|
@ -499,7 +500,6 @@ def main():
|
|||
WARNING + "You chose to not use any cloud providers nor Docker, "
|
||||
"media files won't be served in production." + TERMINATOR
|
||||
)
|
||||
remove_storages_module()
|
||||
|
||||
if "{{ cookiecutter.use_celery }}".lower() == "n":
|
||||
remove_celery_files()
|
||||
|
|
|
@ -7,6 +7,7 @@ NOTE:
|
|||
TODO: restrict Cookiecutter Django project initialization
|
||||
to Python 3.x environments only
|
||||
"""
|
||||
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
|
@ -38,7 +39,7 @@ if "{{ cookiecutter.use_docker }}".lower() == "n":
|
|||
if python_major_version == 2:
|
||||
print(
|
||||
WARNING + "You're running cookiecutter under Python 2, but the generated "
|
||||
"project requires Python 3.11+. Do you want to proceed (y/n)? " + TERMINATOR
|
||||
"project requires Python 3.12+. Do you want to proceed (y/n)? " + TERMINATOR
|
||||
)
|
||||
yes_options, no_options = frozenset(["y"]), frozenset(["n"])
|
||||
while True:
|
||||
|
|
|
@ -15,7 +15,7 @@ norecursedirs = [
|
|||
# ==== black ====
|
||||
[tool.black]
|
||||
line-length = 119
|
||||
target-version = ['py311']
|
||||
target-version = ['py312']
|
||||
|
||||
|
||||
# ==== isort ====
|
||||
|
|
|
@ -1,28 +1,26 @@
|
|||
cookiecutter==2.4.0
|
||||
sh==2.0.6; sys_platform != "win32"
|
||||
cookiecutter==2.6.0
|
||||
sh==2.0.7; sys_platform != "win32"
|
||||
binaryornot==0.4.4
|
||||
|
||||
# Code quality
|
||||
# ------------------------------------------------------------------------------
|
||||
black==23.9.1
|
||||
isort==5.12.0
|
||||
flake8==6.1.0
|
||||
django-upgrade==1.15.0
|
||||
djlint==1.34.0
|
||||
pre-commit==3.4.0
|
||||
ruff==0.4.9
|
||||
django-upgrade==1.18.0
|
||||
djlint==1.34.1
|
||||
pre-commit==3.7.1
|
||||
|
||||
# Testing
|
||||
# ------------------------------------------------------------------------------
|
||||
tox==4.11.3
|
||||
pytest==7.4.2
|
||||
pytest-xdist==3.3.1
|
||||
tox==4.15.1
|
||||
pytest==8.2.2
|
||||
pytest-xdist==3.6.1
|
||||
pytest-cookies==0.7.0
|
||||
pytest-instafail==0.5.0
|
||||
pyyaml==6.0.1
|
||||
|
||||
# Scripting
|
||||
# ------------------------------------------------------------------------------
|
||||
PyGithub==2.1.1
|
||||
gitpython==3.1.37
|
||||
jinja2==3.1.2
|
||||
requests==2.31.0
|
||||
PyGithub==2.3.0
|
||||
gitpython==3.1.43
|
||||
jinja2==3.1.4
|
||||
requests==2.32.3
|
||||
|
|
|
@ -6,6 +6,7 @@ patches, only comparing major and minor version numbers.
|
|||
This script handles when there are multiple Django versions that need
|
||||
to keep up to date.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
|
@ -212,7 +213,7 @@ class GitHubManager:
|
|||
for classifier in package_info["info"]["classifiers"]:
|
||||
# Usually in the form of "Framework :: Django :: 3.2"
|
||||
tokens = classifier.split(" ")
|
||||
if len(tokens) >= 5 and tokens[2].lower() == "django":
|
||||
if len(tokens) >= 5 and tokens[2].lower() == "django" and "." in tokens[4]:
|
||||
version = DjVersion.parse(tokens[4])
|
||||
if len(version) == 2:
|
||||
supported_dj_versions.append(version)
|
||||
|
|
|
@ -40,8 +40,8 @@ def iter_recent_authors():
|
|||
"""
|
||||
Fetch users who opened recently merged pull requests.
|
||||
|
||||
Use Github API to fetch recent authors rather than
|
||||
git CLI to work with Github usernames.
|
||||
Use GitHub API to fetch recent authors rather than
|
||||
git CLI to work with GitHub usernames.
|
||||
"""
|
||||
repo = Github(login_or_token=GITHUB_TOKEN, per_page=5).get_repo(GITHUB_REPO)
|
||||
recent_pulls = repo.get_pulls(state="closed", sort="updated", direction="desc").get_page(0)
|
||||
|
|
4
setup.py
4
setup.py
|
@ -5,7 +5,7 @@ except ImportError:
|
|||
from distutils.core import setup
|
||||
|
||||
# We use calendar versioning
|
||||
version = "2023.09.29"
|
||||
version = "2024.06.15"
|
||||
|
||||
with open("README.md") as readme_file:
|
||||
long_description = readme_file.read()
|
||||
|
@ -30,7 +30,7 @@ setup(
|
|||
"License :: OSI Approved :: BSD License",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: Implementation :: CPython",
|
||||
"Topic :: Software Development",
|
||||
],
|
||||
|
|
|
@ -57,12 +57,11 @@ SUPPORTED_COMBINATIONS = [
|
|||
{"editor": "VS Code"},
|
||||
{"use_docker": "y"},
|
||||
{"use_docker": "n"},
|
||||
{"postgresql_version": "16"},
|
||||
{"postgresql_version": "15"},
|
||||
{"postgresql_version": "14"},
|
||||
{"postgresql_version": "13"},
|
||||
{"postgresql_version": "12"},
|
||||
{"postgresql_version": "11"},
|
||||
{"postgresql_version": "10"},
|
||||
{"cloud_provider": "AWS", "use_whitenoise": "y"},
|
||||
{"cloud_provider": "AWS", "use_whitenoise": "n"},
|
||||
{"cloud_provider": "GCP", "use_whitenoise": "y"},
|
||||
|
@ -74,7 +73,7 @@ SUPPORTED_COMBINATIONS = [
|
|||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mandrill"},
|
||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Postmark"},
|
||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Sendgrid"},
|
||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "SendinBlue"},
|
||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Brevo"},
|
||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "SparkPost"},
|
||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Other SMTP"},
|
||||
# Note: cloud_provider=None AND use_whitenoise=n is not supported
|
||||
|
@ -84,7 +83,7 @@ SUPPORTED_COMBINATIONS = [
|
|||
{"cloud_provider": "AWS", "mail_service": "Mandrill"},
|
||||
{"cloud_provider": "AWS", "mail_service": "Postmark"},
|
||||
{"cloud_provider": "AWS", "mail_service": "Sendgrid"},
|
||||
{"cloud_provider": "AWS", "mail_service": "SendinBlue"},
|
||||
{"cloud_provider": "AWS", "mail_service": "Brevo"},
|
||||
{"cloud_provider": "AWS", "mail_service": "SparkPost"},
|
||||
{"cloud_provider": "AWS", "mail_service": "Other SMTP"},
|
||||
{"cloud_provider": "GCP", "mail_service": "Mailgun"},
|
||||
|
@ -92,7 +91,7 @@ SUPPORTED_COMBINATIONS = [
|
|||
{"cloud_provider": "GCP", "mail_service": "Mandrill"},
|
||||
{"cloud_provider": "GCP", "mail_service": "Postmark"},
|
||||
{"cloud_provider": "GCP", "mail_service": "Sendgrid"},
|
||||
{"cloud_provider": "GCP", "mail_service": "SendinBlue"},
|
||||
{"cloud_provider": "GCP", "mail_service": "Brevo"},
|
||||
{"cloud_provider": "GCP", "mail_service": "SparkPost"},
|
||||
{"cloud_provider": "GCP", "mail_service": "Other SMTP"},
|
||||
{"cloud_provider": "Azure", "mail_service": "Mailgun"},
|
||||
|
@ -100,7 +99,7 @@ SUPPORTED_COMBINATIONS = [
|
|||
{"cloud_provider": "Azure", "mail_service": "Mandrill"},
|
||||
{"cloud_provider": "Azure", "mail_service": "Postmark"},
|
||||
{"cloud_provider": "Azure", "mail_service": "Sendgrid"},
|
||||
{"cloud_provider": "Azure", "mail_service": "SendinBlue"},
|
||||
{"cloud_provider": "Azure", "mail_service": "Brevo"},
|
||||
{"cloud_provider": "Azure", "mail_service": "SparkPost"},
|
||||
{"cloud_provider": "Azure", "mail_service": "Other SMTP"},
|
||||
# Note: cloud_providers GCP, Azure, and None
|
||||
|
@ -180,28 +179,25 @@ def test_project_generation(cookies, context, context_override):
|
|||
|
||||
|
||||
@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
|
||||
def test_flake8_passes(cookies, context_override):
|
||||
"""Generated project should pass flake8."""
|
||||
def test_ruff_check_passes(cookies, context_override):
|
||||
"""Generated project should pass ruff check."""
|
||||
result = cookies.bake(extra_context=context_override)
|
||||
|
||||
try:
|
||||
sh.flake8(_cwd=str(result.project_path))
|
||||
sh.ruff("check", ".", _cwd=str(result.project_path))
|
||||
except sh.ErrorReturnCode as e:
|
||||
pytest.fail(e.stdout.decode())
|
||||
|
||||
|
||||
@auto_fixable
|
||||
@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
|
||||
def test_black_passes(cookies, context_override):
|
||||
"""Check whether generated project passes black style."""
|
||||
def test_ruff_format_passes(cookies, context_override):
|
||||
"""Check whether generated project passes ruff format."""
|
||||
result = cookies.bake(extra_context=context_override)
|
||||
|
||||
try:
|
||||
sh.black(
|
||||
"--check",
|
||||
"--diff",
|
||||
"--exclude",
|
||||
"migrations",
|
||||
sh.ruff(
|
||||
"format",
|
||||
".",
|
||||
_cwd=str(result.project_path),
|
||||
)
|
||||
|
@ -251,7 +247,13 @@ def test_djlint_lint_passes(cookies, context_override):
|
|||
# TODO: remove T002 when fixed https://github.com/Riverside-Healthcare/djLint/issues/687
|
||||
ignored_rules = "H006,H030,H031,T002"
|
||||
try:
|
||||
sh.djlint("--lint", "--ignore", f"{autofixable_rules},{ignored_rules}", ".", _cwd=str(result.project_path))
|
||||
sh.djlint(
|
||||
"--lint",
|
||||
"--ignore",
|
||||
f"{autofixable_rules},{ignored_rules}",
|
||||
".",
|
||||
_cwd=str(result.project_path),
|
||||
)
|
||||
except sh.ErrorReturnCode as e:
|
||||
pytest.fail(e.stdout.decode())
|
||||
|
||||
|
@ -272,7 +274,7 @@ def test_djlint_check_passes(cookies, context_override):
|
|||
["use_docker", "expected_test_script"],
|
||||
[
|
||||
("n", "pytest"),
|
||||
("y", "docker compose -f local.yml run django pytest"),
|
||||
("y", "docker compose -f docker-compose.local.yml run django pytest"),
|
||||
],
|
||||
)
|
||||
def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_script):
|
||||
|
@ -287,7 +289,7 @@ def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_scrip
|
|||
with open(f"{result.project_path}/.travis.yml") as travis_yml:
|
||||
try:
|
||||
yml = yaml.safe_load(travis_yml)["jobs"]["include"]
|
||||
assert yml[0]["script"] == ["flake8"]
|
||||
assert yml[0]["script"] == ["ruff check ."]
|
||||
assert yml[1]["script"] == [expected_test_script]
|
||||
except yaml.YAMLError as e:
|
||||
pytest.fail(str(e))
|
||||
|
@ -297,7 +299,7 @@ def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_scrip
|
|||
["use_docker", "expected_test_script"],
|
||||
[
|
||||
("n", "pytest"),
|
||||
("y", "docker compose -f local.yml run django pytest"),
|
||||
("y", "docker compose -f docker-compose.local.yml run django pytest"),
|
||||
],
|
||||
)
|
||||
def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expected_test_script):
|
||||
|
@ -324,7 +326,7 @@ def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expec
|
|||
["use_docker", "expected_test_script"],
|
||||
[
|
||||
("n", "pytest"),
|
||||
("y", "docker compose -f local.yml run django pytest"),
|
||||
("y", "docker compose -f docker-compose.local.yml run django pytest"),
|
||||
],
|
||||
)
|
||||
def test_github_invokes_linter_and_pytest(cookies, context, use_docker, expected_test_script):
|
||||
|
|
|
@ -15,28 +15,38 @@ cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y "$@"
|
|||
cd my_awesome_project
|
||||
|
||||
# make sure all images build
|
||||
docker compose -f local.yml build
|
||||
docker compose -f docker-compose.local.yml build
|
||||
|
||||
# run the project's type checks
|
||||
docker compose -f local.yml run django mypy my_awesome_project
|
||||
docker compose -f docker-compose.local.yml run django mypy my_awesome_project
|
||||
|
||||
# run the project's tests
|
||||
docker compose -f local.yml run django pytest
|
||||
docker compose -f docker-compose.local.yml run django pytest
|
||||
|
||||
# return non-zero status code if there are migrations that have not been created
|
||||
docker compose -f local.yml run django python manage.py makemigrations --dry-run --check || { echo "ERROR: there were changes in the models, but migration listed above have not been created and are not saved in version control"; exit 1; }
|
||||
docker compose -f docker-compose.local.yml run django python manage.py makemigrations --dry-run --check || { echo "ERROR: there were changes in the models, but migration listed above have not been created and are not saved in version control"; exit 1; }
|
||||
|
||||
# Test support for translations
|
||||
docker compose -f local.yml run django python manage.py makemessages --all
|
||||
docker compose -f docker-compose.local.yml run django python manage.py makemessages --all
|
||||
|
||||
# Make sure the check doesn't raise any warnings
|
||||
docker compose -f local.yml run django python manage.py check --fail-level WARNING
|
||||
docker compose -f docker-compose.local.yml run \
|
||||
-e DJANGO_SECRET_KEY="$(openssl rand -base64 64)" \
|
||||
-e REDIS_URL=redis://redis:6379/0 \
|
||||
-e CELERY_BROKER_URL=redis://redis:6379/0 \
|
||||
-e DJANGO_AWS_ACCESS_KEY_ID=x \
|
||||
-e DJANGO_AWS_SECRET_ACCESS_KEY=x \
|
||||
-e DJANGO_AWS_STORAGE_BUCKET_NAME=x \
|
||||
-e DJANGO_ADMIN_URL=x \
|
||||
-e MAILGUN_API_KEY=x \
|
||||
-e MAILGUN_DOMAIN=x \
|
||||
django python manage.py check --settings=config.settings.production --deploy --database default --fail-level WARNING
|
||||
|
||||
# Generate the HTML for the documentation
|
||||
docker compose -f local.yml run docs make html
|
||||
docker compose -f docker-compose.docs.yml run docs make html
|
||||
|
||||
# Run npm build script if package.json is present
|
||||
if [ -f "package.json" ]
|
||||
then
|
||||
docker compose -f local.yml run node npm run build
|
||||
docker compose -f docker-compose.local.yml run node npm run build
|
||||
fi
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
"""Unit tests for the hooks"""
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
|
|
2
tox.ini
2
tox.ini
|
@ -1,6 +1,6 @@
|
|||
[tox]
|
||||
skipsdist = true
|
||||
envlist = py311,black-template
|
||||
envlist = py312,black-template
|
||||
|
||||
[testenv]
|
||||
deps = -rrequirements.txt
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
{
|
||||
"name": "{{cookiecutter.project_slug}}_dev",
|
||||
"dockerComposeFile": [
|
||||
"../local.yml"
|
||||
"../docker-compose.local.yml"
|
||||
],
|
||||
"init": true,
|
||||
"mounts": [
|
||||
|
@ -11,11 +11,6 @@
|
|||
"target": "/home/dev-user/.bash_history",
|
||||
"type": "bind"
|
||||
},
|
||||
{
|
||||
"source": "/tmp",
|
||||
"target": "/tmp",
|
||||
"type": "bind"
|
||||
},
|
||||
{
|
||||
"source": "~/.ssh",
|
||||
"target": "/home/dev-user/.ssh",
|
||||
|
@ -40,24 +35,13 @@
|
|||
"analysis.typeCheckingMode": "basic",
|
||||
"defaultInterpreterPath": "/usr/local/bin/python",
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": true
|
||||
"source.organizeImports": "always"
|
||||
},
|
||||
// Uncomment when fixed
|
||||
// https://github.com/microsoft/vscode-remote-release/issues/8474
|
||||
// "editor.defaultFormatter": "ms-python.black-formatter",
|
||||
"formatting.blackPath": "/usr/local/bin/black",
|
||||
"formatting.provider": "black",
|
||||
"editor.defaultFormatter": "charliermarsh.ruff",
|
||||
"languageServer": "Pylance",
|
||||
// "linting.banditPath": "/usr/local/py-utils/bin/bandit",
|
||||
"linting.enabled": true,
|
||||
"linting.flake8Enabled": true,
|
||||
"linting.flake8Path": "/usr/local/bin/flake8",
|
||||
"linting.mypyEnabled": true,
|
||||
"linting.mypyPath": "/usr/local/bin/mypy",
|
||||
"linting.pycodestylePath": "/usr/local/bin/pycodestyle",
|
||||
// "linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
|
||||
"linting.pylintEnabled": true,
|
||||
"linting.pylintPath": "/usr/local/bin/pylint"
|
||||
}
|
||||
},
|
||||
// https://code.visualstudio.com/docs/remote/devcontainerjson-reference#_vs-code-specific-properties
|
||||
|
@ -70,8 +54,7 @@
|
|||
// python
|
||||
"ms-python.python",
|
||||
"ms-python.vscode-pylance",
|
||||
"ms-python.isort",
|
||||
"ms-python.black-formatter",
|
||||
"charliermarsh.ruff",
|
||||
// django
|
||||
"batisteo.vscode-django"
|
||||
]
|
||||
|
|
|
@ -13,7 +13,7 @@ environment:
|
|||
steps:
|
||||
- name: lint
|
||||
pull: if-not-exists
|
||||
image: python:3.11
|
||||
image: python:3.12
|
||||
environment:
|
||||
PRE_COMMIT_HOME: ${CI_PROJECT_DIR}/.cache/pre-commit
|
||||
volumes:
|
||||
|
@ -27,16 +27,17 @@ steps:
|
|||
- name: test
|
||||
pull: if-not-exists
|
||||
{%- if cookiecutter.use_docker == 'y' %}
|
||||
image: docker/compose:1.29.2
|
||||
image: docker:25.0
|
||||
environment:
|
||||
DATABASE_URL: pgsql://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres/$POSTGRES_DB
|
||||
commands:
|
||||
- docker-compose -f local.yml build
|
||||
- docker-compose -f local.yml run --rm django python manage.py migrate
|
||||
- docker-compose -f local.yml up -d
|
||||
- docker-compose -f local.yml run django pytest
|
||||
- docker-compose -f docker-compose.local.yml build
|
||||
- docker-compose -f docker-compose.docs.yml build
|
||||
- docker-compose -f docker-compose.local.yml run --rm django python manage.py migrate
|
||||
- docker-compose -f docker-compose.local.yml up -d
|
||||
- docker-compose -f docker-compose.local.yml run django pytest
|
||||
{%- else %}
|
||||
image: python:3.11
|
||||
image: python:3.12
|
||||
commands:
|
||||
- pip install -r requirements/local.txt
|
||||
- pytest
|
||||
|
|
|
@ -28,8 +28,8 @@ POSTMARK_SERVER_TOKEN=
|
|||
SENDGRID_API_KEY=
|
||||
SENDGRID_GENERATE_MESSAGE_ID=True
|
||||
SENDGRID_MERGE_FIELD_FORMAT=None
|
||||
{% elif cookiecutter.mail_service == 'SendinBlue' %}
|
||||
SENDINBLUE_API_KEY=
|
||||
{% elif cookiecutter.mail_service == 'Brevo' %}
|
||||
BREVO_API_KEY=
|
||||
{% elif cookiecutter.mail_service == 'SparkPost' %}
|
||||
SPARKPOST_API_KEY=
|
||||
{% endif %}
|
||||
|
|
|
@ -26,17 +26,17 @@ jobs:
|
|||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
python-version: '3.12'
|
||||
|
||||
{%- if cookiecutter.open_source_license != 'Not open source' %}
|
||||
# Consider using pre-commit.ci for open source project
|
||||
{%- endif %}
|
||||
- name: Run pre-commit
|
||||
uses: pre-commit/action@v3.0.0
|
||||
uses: pre-commit/action@v3.0.1
|
||||
|
||||
# With no caching at all the entire ci process takes 4m 30s to complete!
|
||||
# With no caching at all the entire ci process takes 3m to complete!
|
||||
pytest:
|
||||
runs-on: ubuntu-latest
|
||||
{%- if cookiecutter.use_docker == 'n' %}
|
||||
|
@ -69,22 +69,25 @@ jobs:
|
|||
{%- if cookiecutter.use_docker == 'y' %}
|
||||
|
||||
- name: Build the Stack
|
||||
run: docker compose -f local.yml build
|
||||
run: docker compose -f docker-compose.local.yml build django
|
||||
|
||||
- name: Build the docs
|
||||
run: docker compose -f docker-compose.docs.yml build docs
|
||||
|
||||
- name: Run DB Migrations
|
||||
run: docker compose -f local.yml run --rm django python manage.py migrate
|
||||
run: docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
|
||||
|
||||
- name: Run Django Tests
|
||||
run: docker compose -f local.yml run django pytest
|
||||
run: docker compose -f docker-compose.local.yml run django pytest
|
||||
|
||||
- name: Tear down the Stack
|
||||
run: docker compose -f local.yml down
|
||||
run: docker compose -f docker-compose.local.yml down
|
||||
{%- else %}
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
with:
|
||||
python-version: '3.11'
|
||||
python-version: '3.12'
|
||||
cache: pip
|
||||
cache-dependency-path: |
|
||||
requirements/base.txt
|
||||
|
|
|
@ -13,7 +13,7 @@ variables:
|
|||
|
||||
precommit:
|
||||
stage: lint
|
||||
image: python:3.11
|
||||
image: python:3.12
|
||||
variables:
|
||||
PRE_COMMIT_HOME: ${CI_PROJECT_DIR}/.cache/pre-commit
|
||||
cache:
|
||||
|
@ -27,20 +27,21 @@ precommit:
|
|||
pytest:
|
||||
stage: test
|
||||
{%- if cookiecutter.use_docker == 'y' %}
|
||||
image: docker/compose:1.29.2
|
||||
image: docker:25.0
|
||||
tags:
|
||||
- docker
|
||||
services:
|
||||
- docker:dind
|
||||
before_script:
|
||||
- docker compose -f local.yml build
|
||||
- docker compose -f docker-compose.local.yml build
|
||||
- docker compose -f docker-compose.docs.yml build
|
||||
# Ensure celerybeat does not crash due to non-existent tables
|
||||
- docker compose -f local.yml run --rm django python manage.py migrate
|
||||
- docker compose -f local.yml up -d
|
||||
- docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
|
||||
- docker compose -f docker-compose.local.yml up -d
|
||||
script:
|
||||
- docker compose -f local.yml run django pytest
|
||||
- docker compose -f docker-compose.local.yml run django pytest
|
||||
{%- else %}
|
||||
image: python:3.11
|
||||
image: python:3.12
|
||||
tags:
|
||||
- python
|
||||
services:
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
{%- endif %}
|
||||
</list>
|
||||
</option>
|
||||
<option name="sourceFilePath" value="local.yml"/>
|
||||
<option name="sourceFilePath" value="docker-compose.local.yml"/>
|
||||
</settings>
|
||||
</deployment>
|
||||
<method v="2"/>
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<option value="docs"/>
|
||||
</list>
|
||||
</option>
|
||||
<option name="sourceFilePath" value="local.yml"/>
|
||||
<option name="sourceFilePath" value="docker-compose.local.yml"/>
|
||||
</settings>
|
||||
</deployment>
|
||||
<method v="2"/>
|
||||
|
|
|
@ -1,9 +1,12 @@
|
|||
exclude: '^docs/|/migrations/'
|
||||
exclude: '^docs/|/migrations/|devcontainer.json'
|
||||
default_stages: [commit]
|
||||
|
||||
default_language_version:
|
||||
python: python3.12
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.4.0
|
||||
rev: v4.6.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
|
@ -18,41 +21,30 @@ repos:
|
|||
- id: detect-private-key
|
||||
|
||||
- repo: https://github.com/pre-commit/mirrors-prettier
|
||||
rev: v3.0.3
|
||||
rev: v4.0.0-alpha.8
|
||||
hooks:
|
||||
- id: prettier
|
||||
args: ['--tab-width', '2', '--single-quote']
|
||||
exclude: '{{cookiecutter.project_slug}}/templates/'
|
||||
|
||||
- repo: https://github.com/adamchainz/django-upgrade
|
||||
rev: '1.15.0'
|
||||
rev: '1.18.0'
|
||||
hooks:
|
||||
- id: django-upgrade
|
||||
args: ['--target-version', '4.2']
|
||||
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v3.14.0
|
||||
# Run the Ruff linter.
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.4.9
|
||||
hooks:
|
||||
- id: pyupgrade
|
||||
args: [--py311-plus]
|
||||
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 23.9.1
|
||||
hooks:
|
||||
- id: black
|
||||
|
||||
- repo: https://github.com/PyCQA/isort
|
||||
rev: 5.12.0
|
||||
hooks:
|
||||
- id: isort
|
||||
|
||||
- repo: https://github.com/PyCQA/flake8
|
||||
rev: 6.1.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
# Linter
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
# Formatter
|
||||
- id: ruff-format
|
||||
|
||||
- repo: https://github.com/Riverside-Healthcare/djLint
|
||||
rev: v1.34.0
|
||||
rev: v1.34.1
|
||||
hooks:
|
||||
- id: djlint-reformat-django
|
||||
- id: djlint-django
|
||||
|
|
|
@ -8,7 +8,7 @@ version: 2
|
|||
build:
|
||||
os: ubuntu-22.04
|
||||
tools:
|
||||
python: '3.11'
|
||||
python: '3.12'
|
||||
|
||||
# Build documentation in the docs/ directory with Sphinx
|
||||
sphinx:
|
||||
|
|
|
@ -2,7 +2,7 @@ dist: focal
|
|||
|
||||
language: python
|
||||
python:
|
||||
- "3.11"
|
||||
- "3.12"
|
||||
|
||||
services:
|
||||
- {% if cookiecutter.use_docker == 'y' %}docker{% else %}postgresql{% endif %}
|
||||
|
@ -10,23 +10,24 @@ jobs:
|
|||
include:
|
||||
- name: "Linter"
|
||||
before_script:
|
||||
- pip install -q flake8
|
||||
- pip install -q ruff
|
||||
script:
|
||||
- "flake8"
|
||||
- ruff check .
|
||||
|
||||
- name: "Django Test"
|
||||
{%- if cookiecutter.use_docker == 'y' %}
|
||||
before_script:
|
||||
- docker compose -v
|
||||
- docker -v
|
||||
- docker compose -f local.yml build
|
||||
- docker compose -f docker-compose.local.yml build
|
||||
- docker compose -f docker-compose.docs.yml build
|
||||
# Ensure celerybeat does not crash due to non-existent tables
|
||||
- docker compose -f local.yml run --rm django python manage.py migrate
|
||||
- docker compose -f local.yml up -d
|
||||
- docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
|
||||
- docker compose -f docker-compose.local.yml up -d
|
||||
script:
|
||||
- "docker compose -f local.yml run django pytest"
|
||||
- docker compose -f docker-compose.local.yml run django pytest
|
||||
after_failure:
|
||||
- docker compose -f local.yml logs
|
||||
- docker compose -f docker-compose.local.yml logs
|
||||
{%- else %}
|
||||
before_install:
|
||||
- sudo apt-get update -qq
|
||||
|
@ -37,9 +38,9 @@ jobs:
|
|||
- sudo apt-get install -qq libsqlite3-dev libxml2 libxml2-dev libssl-dev libbz2-dev wget curl llvm
|
||||
language: python
|
||||
python:
|
||||
- "3.11"
|
||||
- "3.12"
|
||||
install:
|
||||
- pip install -r requirements/local.txt
|
||||
script:
|
||||
- "pytest"
|
||||
- pytest
|
||||
{%- endif %}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
release: python manage.py migrate
|
||||
{%- if cookiecutter.use_async == "y" %}
|
||||
web: gunicorn config.asgi:application -k uvicorn.workers.UvicornWorker
|
||||
web: gunicorn config.asgi:application -k uvicorn_worker.UvicornWorker
|
||||
{%- else %}
|
||||
web: gunicorn config.wsgi:application
|
||||
{%- endif %}
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
{{ cookiecutter.description }}
|
||||
|
||||
[](https://github.com/cookiecutter/cookiecutter-django/)
|
||||
[](https://github.com/ambv/black)
|
||||
[](https://github.com/astral-sh/ruff)
|
||||
|
||||
{%- if cookiecutter.open_source_license != "Not open source" %}
|
||||
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# define an alias for the specific python version used in this file.
|
||||
FROM python:3.11.6-slim-bullseye as python
|
||||
FROM docker.io/python:3.12.4-slim-bookworm as python
|
||||
|
||||
# Python build stage
|
||||
FROM python as python-build-stage
|
||||
|
@ -10,7 +10,7 @@ ARG BUILD_ENVIRONMENT=local
|
|||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
# dependencies for building Python packages
|
||||
build-essential \
|
||||
# psycopg2 dependencies
|
||||
# psycopg dependencies
|
||||
libpq-dev
|
||||
|
||||
# Requirements are installed here to ensure they will be cached.
|
||||
|
@ -47,7 +47,7 @@ RUN groupadd --gid 1000 dev-user \
|
|||
|
||||
# Install required system dependencies
|
||||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
# psycopg2 dependencies
|
||||
# psycopg dependencies
|
||||
libpq-dev \
|
||||
# Translations dependencies
|
||||
gettext \
|
||||
|
|
|
@ -3,6 +3,14 @@
|
|||
set -o errexit
|
||||
set -o nounset
|
||||
|
||||
|
||||
until timeout 10 celery -A config.celery_app inspect ping; do
|
||||
>&2 echo "Celery workers not available"
|
||||
done
|
||||
|
||||
echo 'Starting flower'
|
||||
|
||||
|
||||
exec watchfiles --filter python celery.__main__.main \
|
||||
--args \
|
||||
"-A config.celery_app -b \"${CELERY_BROKER_URL}\" flower --basic_auth=\"${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}\""
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# define an alias for the specific python version used in this file.
|
||||
FROM python:3.11.5-slim-bullseye as python
|
||||
FROM docker.io/python:3.12.4-slim-bookworm as python
|
||||
|
||||
|
||||
# Python build stage
|
||||
|
@ -10,7 +10,7 @@ ENV PYTHONDONTWRITEBYTECODE 1
|
|||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
# dependencies for building Python packages
|
||||
build-essential \
|
||||
# psycopg2 dependencies
|
||||
# psycopg dependencies
|
||||
libpq-dev \
|
||||
# cleaning up unused files
|
||||
&& apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
|
||||
|
@ -35,7 +35,7 @@ ENV PYTHONDONTWRITEBYTECODE 1
|
|||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
# To run the Makefile
|
||||
make \
|
||||
# psycopg2 dependencies
|
||||
# psycopg dependencies
|
||||
libpq-dev \
|
||||
# Translations dependencies
|
||||
gettext \
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
FROM node:18-bullseye-slim
|
||||
FROM docker.io/node:20-bookworm-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
FROM garland/aws-cli-docker:1.16.140
|
||||
FROM docker.io/amazon/aws-cli:2.16.8
|
||||
|
||||
COPY ./compose/production/aws/maintenance /usr/local/bin/maintenance
|
||||
COPY ./compose/production/postgres/maintenance/_sourced /usr/local/bin/maintenance/_sourced
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
### Download a file from your Amazon S3 bucket to the postgres /backups folder
|
||||
###
|
||||
### Usage:
|
||||
### $ docker compose -f production.yml run --rm awscli <1>
|
||||
### $ docker compose -f docker-compose.production.yml run --rm awscli <1>
|
||||
|
||||
set -o errexit
|
||||
set -o pipefail
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
### Upload the /backups folder to Amazon S3
|
||||
###
|
||||
### Usage:
|
||||
### $ docker compose -f production.yml run --rm awscli upload
|
||||
### $ docker compose -f docker-compose.production.yml run --rm awscli upload
|
||||
|
||||
set -o errexit
|
||||
set -o pipefail
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
{% if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] -%}
|
||||
FROM node:18-bullseye-slim as client-builder
|
||||
FROM docker.io/node:20-bookworm-slim as client-builder
|
||||
|
||||
ARG APP_HOME=/app
|
||||
WORKDIR ${APP_HOME}
|
||||
|
@ -25,7 +25,7 @@ RUN npm run build
|
|||
|
||||
{%- endif %}
|
||||
# define an alias for the specific python version used in this file.
|
||||
FROM python:3.11.6-slim-bullseye as python
|
||||
FROM docker.io/python:3.12.4-slim-bookworm as python
|
||||
|
||||
# Python build stage
|
||||
FROM python as python-build-stage
|
||||
|
@ -36,7 +36,7 @@ ARG BUILD_ENVIRONMENT=production
|
|||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
# dependencies for building Python packages
|
||||
build-essential \
|
||||
# psycopg2 dependencies
|
||||
# psycopg dependencies
|
||||
libpq-dev
|
||||
|
||||
# Requirements are installed here to ensure they will be cached.
|
||||
|
@ -65,7 +65,7 @@ RUN addgroup --system django \
|
|||
|
||||
# Install required system dependencies
|
||||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
# psycopg2 dependencies
|
||||
# psycopg dependencies
|
||||
libpq-dev \
|
||||
# Translations dependencies
|
||||
gettext \
|
||||
|
@ -117,7 +117,7 @@ COPY --chown=django:django . ${APP_HOME}
|
|||
{%- endif %}
|
||||
|
||||
# make django owner of the WORKDIR directory as well.
|
||||
RUN chown django:django ${APP_HOME}
|
||||
RUN chown -R django:django ${APP_HOME}
|
||||
|
||||
USER django
|
||||
|
||||
|
|
|
@ -4,6 +4,14 @@ set -o errexit
|
|||
set -o nounset
|
||||
|
||||
|
||||
|
||||
until timeout 10 celery -A config.celery_app inspect ping; do
|
||||
>&2 echo "Celery workers not available"
|
||||
done
|
||||
|
||||
echo 'Starting flower'
|
||||
|
||||
|
||||
exec celery \
|
||||
-A config.celery_app \
|
||||
-b "${CELERY_BROKER_URL}" \
|
||||
|
|
|
@ -28,7 +28,7 @@ if compress_enabled; then
|
|||
fi
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.use_async == 'y' %}
|
||||
exec /usr/local/bin/gunicorn config.asgi --bind 0.0.0.0:5000 --chdir=/app -k uvicorn.workers.UvicornWorker
|
||||
exec /usr/local/bin/gunicorn config.asgi --bind 0.0.0.0:5000 --chdir=/app -k uvicorn_worker.UvicornWorker
|
||||
{%- else %}
|
||||
exec /usr/local/bin/gunicorn config.wsgi --bind 0.0.0.0:5000 --chdir=/app
|
||||
{%- endif %}
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
FROM nginx:1.17.8-alpine
|
||||
FROM docker.io/nginx:1.17.8-alpine
|
||||
COPY ./compose/production/nginx/default.conf /etc/nginx/conf.d/default.conf
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
FROM postgres:{{ cookiecutter.postgresql_version }}
|
||||
FROM docker.io/postgres:{{ cookiecutter.postgresql_version }}
|
||||
|
||||
COPY ./compose/production/postgres/maintenance /usr/local/bin/maintenance
|
||||
RUN chmod +x /usr/local/bin/maintenance/*
|
||||
|
|
|
@ -0,0 +1,36 @@
|
|||
#!/usr/bin/env bash
|
||||
|
||||
### Remove a database backup.
|
||||
###
|
||||
### Parameters:
|
||||
### <1> filename of a backup to remove.
|
||||
###
|
||||
### Usage:
|
||||
### $ docker-compose -f <environment>.yml (exec |run --rm) postgres rmbackup <1>
|
||||
|
||||
|
||||
set -o errexit
|
||||
set -o pipefail
|
||||
set -o nounset
|
||||
|
||||
|
||||
working_dir="$(dirname ${0})"
|
||||
source "${working_dir}/_sourced/constants.sh"
|
||||
source "${working_dir}/_sourced/messages.sh"
|
||||
|
||||
|
||||
if [[ -z ${1+x} ]]; then
|
||||
message_error "Backup filename is not specified yet it is a required parameter. Make sure you provide one and try again."
|
||||
exit 1
|
||||
fi
|
||||
backup_filename="${BACKUP_DIR_PATH}/${1}"
|
||||
if [[ ! -f "${backup_filename}" ]]; then
|
||||
message_error "No backup with the specified filename found. Check out the 'backups' maintenance script output to see if there is one and try again."
|
||||
exit 1
|
||||
fi
|
||||
|
||||
message_welcome "Removing the '${backup_filename}' backup file..."
|
||||
|
||||
rm -r "${backup_filename}"
|
||||
|
||||
message_success "The '${backup_filename}' database backup has been removed."
|
|
@ -1,4 +1,4 @@
|
|||
FROM traefik:2.10.4
|
||||
FROM docker.io/traefik:2.11.2
|
||||
RUN mkdir -p /etc/traefik/acme \
|
||||
&& touch /etc/traefik/acme/acme.json \
|
||||
&& chmod 600 /etc/traefik/acme/acme.json
|
||||
|
|
|
@ -6,7 +6,7 @@ entryPoints:
|
|||
# http
|
||||
address: ':80'
|
||||
http:
|
||||
# https://docs.traefik.io/routing/entrypoints/#entrypoint
|
||||
# https://doc.traefik.io/traefik/routing/entrypoints/#entrypoint
|
||||
redirections:
|
||||
entryPoint:
|
||||
to: web-secure
|
||||
|
@ -22,11 +22,11 @@ entryPoints:
|
|||
|
||||
certificatesResolvers:
|
||||
letsencrypt:
|
||||
# https://docs.traefik.io/master/https/acme/#lets-encrypt
|
||||
# https://doc.traefik.io/traefik/https/acme/#lets-encrypt
|
||||
acme:
|
||||
email: '{{ cookiecutter.email }}'
|
||||
storage: /etc/traefik/acme/acme.json
|
||||
# https://docs.traefik.io/master/https/acme/#httpchallenge
|
||||
# https://doc.traefik.io/traefik/https/acme/#httpchallenge
|
||||
httpChallenge:
|
||||
entryPoint: web
|
||||
|
||||
|
@ -44,7 +44,7 @@ http:
|
|||
- csrf
|
||||
service: django
|
||||
tls:
|
||||
# https://docs.traefik.io/master/routing/routers/#certresolver
|
||||
# https://doc.traefik.io/traefik/routing/routers/#certresolver
|
||||
certResolver: letsencrypt
|
||||
{%- if cookiecutter.use_celery == 'y' %}
|
||||
|
||||
|
@ -54,7 +54,7 @@ http:
|
|||
- flower
|
||||
service: flower
|
||||
tls:
|
||||
# https://docs.traefik.io/master/routing/routers/#certresolver
|
||||
# https://doc.traefik.io/traefik/master/routing/routers/#certresolver
|
||||
certResolver: letsencrypt
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.cloud_provider == 'None' %}
|
||||
|
@ -76,7 +76,7 @@ http:
|
|||
|
||||
middlewares:
|
||||
csrf:
|
||||
# https://docs.traefik.io/master/middlewares/headers/#hostsproxyheaders
|
||||
# https://doc.traefik.io/traefik/master/middlewares/http/headers/#hostsproxyheaders
|
||||
# https://docs.djangoproject.com/en/dev/ref/csrf/#ajax
|
||||
headers:
|
||||
hostsProxyHeaders: ['X-CSRFToken']
|
||||
|
@ -102,7 +102,7 @@ http:
|
|||
{%- endif %}
|
||||
|
||||
providers:
|
||||
# https://docs.traefik.io/master/providers/file/
|
||||
# https://doc.traefik.io/traefik/master/providers/file/
|
||||
file:
|
||||
filename: /etc/traefik/traefik.yml
|
||||
watch: true
|
||||
|
|
|
@ -1,12 +1,10 @@
|
|||
from django.conf import settings
|
||||
from rest_framework.routers import DefaultRouter, SimpleRouter
|
||||
from rest_framework.routers import DefaultRouter
|
||||
from rest_framework.routers import SimpleRouter
|
||||
|
||||
from {{ cookiecutter.project_slug }}.users.api.views import UserViewSet
|
||||
|
||||
if settings.DEBUG:
|
||||
router = DefaultRouter()
|
||||
else:
|
||||
router = SimpleRouter()
|
||||
router = DefaultRouter() if settings.DEBUG else SimpleRouter()
|
||||
|
||||
router.register("users", UserViewSet)
|
||||
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
# ruff: noqa
|
||||
"""
|
||||
ASGI config for {{ cookiecutter.project_name }} project.
|
||||
|
||||
|
@ -7,6 +8,7 @@ For more information on this file, see
|
|||
https://docs.djangoproject.com/en/dev/howto/deployment/asgi/
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
@ -28,7 +30,7 @@ django_application = get_asgi_application()
|
|||
# application = HelloWorldApplication(application)
|
||||
|
||||
# Import websocket application here, so apps from django_application are loaded first
|
||||
from config.websocket import websocket_application # noqa isort:skip
|
||||
from config.websocket import websocket_application
|
||||
|
||||
|
||||
async def application(scope, receive, send):
|
||||
|
@ -37,4 +39,5 @@ async def application(scope, receive, send):
|
|||
elif scope["type"] == "websocket":
|
||||
await websocket_application(scope, receive, send)
|
||||
else:
|
||||
raise NotImplementedError(f"Unknown scope type {scope['type']}")
|
||||
msg = f"Unknown scope type {scope['type']}"
|
||||
raise NotImplementedError(msg)
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
"""
|
||||
Base settings to build other settings files upon.
|
||||
"""
|
||||
# ruff: noqa: ERA001, E501
|
||||
"""Base settings to build other settings files upon."""
|
||||
|
||||
from pathlib import Path
|
||||
|
||||
import environ
|
||||
|
@ -84,6 +84,7 @@ THIRD_PARTY_APPS = [
|
|||
"crispy_bootstrap5",
|
||||
"allauth",
|
||||
"allauth.account",
|
||||
"allauth.mfa",
|
||||
"allauth.socialaccount",
|
||||
{%- if cookiecutter.use_celery == 'y' %}
|
||||
"django_celery_beat",
|
||||
|
@ -137,7 +138,9 @@ PASSWORD_HASHERS = [
|
|||
]
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
|
||||
AUTH_PASSWORD_VALIDATORS = [
|
||||
{"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"},
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
|
||||
},
|
||||
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
|
||||
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
|
||||
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
|
||||
|
@ -210,7 +213,7 @@ TEMPLATES = [
|
|||
"{{cookiecutter.project_slug}}.users.context_processors.allauth_settings",
|
||||
],
|
||||
},
|
||||
}
|
||||
},
|
||||
]
|
||||
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#form-renderer
|
||||
|
@ -274,7 +277,7 @@ LOGGING = {
|
|||
"level": "DEBUG",
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": "verbose",
|
||||
}
|
||||
},
|
||||
},
|
||||
"root": {"level": "INFO", "handlers": ["console"]},
|
||||
}
|
||||
|
@ -319,25 +322,25 @@ CELERY_TASK_SEND_SENT_EVENT = True
|
|||
# django-allauth
|
||||
# ------------------------------------------------------------------------------
|
||||
ACCOUNT_ALLOW_REGISTRATION = env.bool("DJANGO_ACCOUNT_ALLOW_REGISTRATION", True)
|
||||
# https://django-allauth.readthedocs.io/en/latest/configuration.html
|
||||
# https://docs.allauth.org/en/latest/account/configuration.html
|
||||
ACCOUNT_AUTHENTICATION_METHOD = "{{cookiecutter.username_type}}"
|
||||
# https://django-allauth.readthedocs.io/en/latest/configuration.html
|
||||
# https://docs.allauth.org/en/latest/account/configuration.html
|
||||
ACCOUNT_EMAIL_REQUIRED = True
|
||||
{%- if cookiecutter.username_type == "email" %}
|
||||
# https://django-allauth.readthedocs.io/en/latest/configuration.html
|
||||
# https://docs.allauth.org/en/latest/account/configuration.html
|
||||
ACCOUNT_USERNAME_REQUIRED = False
|
||||
# https://django-allauth.readthedocs.io/en/latest/configuration.html
|
||||
# https://docs.allauth.org/en/latest/account/configuration.html
|
||||
ACCOUNT_USER_MODEL_USERNAME_FIELD = None
|
||||
{%- endif %}
|
||||
# https://django-allauth.readthedocs.io/en/latest/configuration.html
|
||||
# https://docs.allauth.org/en/latest/account/configuration.html
|
||||
ACCOUNT_EMAIL_VERIFICATION = "mandatory"
|
||||
# https://django-allauth.readthedocs.io/en/latest/configuration.html
|
||||
# https://docs.allauth.org/en/latest/account/configuration.html
|
||||
ACCOUNT_ADAPTER = "{{cookiecutter.project_slug}}.users.adapters.AccountAdapter"
|
||||
# https://django-allauth.readthedocs.io/en/latest/forms.html
|
||||
# https://docs.allauth.org/en/latest/account/forms.html
|
||||
ACCOUNT_FORMS = {"signup": "{{cookiecutter.project_slug}}.users.forms.UserSignupForm"}
|
||||
# https://django-allauth.readthedocs.io/en/latest/configuration.html
|
||||
# https://docs.allauth.org/en/latest/socialaccount/configuration.html
|
||||
SOCIALACCOUNT_ADAPTER = "{{cookiecutter.project_slug}}.users.adapters.SocialAccountAdapter"
|
||||
# https://django-allauth.readthedocs.io/en/latest/forms.html
|
||||
# https://docs.allauth.org/en/latest/socialaccount/configuration.html
|
||||
SOCIALACCOUNT_FORMS = {"signup": "{{cookiecutter.project_slug}}.users.forms.UserSocialSignupForm"}
|
||||
{% if cookiecutter.frontend_pipeline == 'Django Compressor' -%}
|
||||
# django-compressor
|
||||
|
@ -369,6 +372,7 @@ SPECTACULAR_SETTINGS = {
|
|||
"DESCRIPTION": "Documentation of API endpoints of {{ cookiecutter.project_name }}",
|
||||
"VERSION": "1.0.0",
|
||||
"SERVE_PERMISSIONS": ["rest_framework.permissions.IsAdminUser"],
|
||||
"SCHEMA_PATH_PREFIX": "/api/",
|
||||
}
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.frontend_pipeline == 'Webpack' %}
|
||||
|
@ -380,7 +384,7 @@ WEBPACK_LOADER = {
|
|||
"STATS_FILE": BASE_DIR / "webpack-stats.json",
|
||||
"POLL_INTERVAL": 0.1,
|
||||
"IGNORE": [r".+\.hot-update.js", r".+\.map"],
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
{%- endif %}
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
from .base import * # noqa
|
||||
# ruff: noqa: E501
|
||||
from .base import * # noqa: F403
|
||||
from .base import INSTALLED_APPS
|
||||
from .base import MIDDLEWARE
|
||||
{%- if cookiecutter.frontend_pipeline == 'Webpack' %}
|
||||
from .base import WEBPACK_LOADER
|
||||
{%- endif %}
|
||||
from .base import env
|
||||
|
||||
# GENERAL
|
||||
|
@ -11,7 +17,7 @@ SECRET_KEY = env(
|
|||
default="!!!SET DJANGO_SECRET_KEY!!!",
|
||||
)
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
|
||||
ALLOWED_HOSTS = ["localhost", "0.0.0.0", "127.0.0.1"]
|
||||
ALLOWED_HOSTS = ["localhost", "0.0.0.0", "127.0.0.1"] # noqa: S104
|
||||
|
||||
# CACHES
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -20,7 +26,7 @@ CACHES = {
|
|||
"default": {
|
||||
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
|
||||
"LOCATION": "",
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
# EMAIL
|
||||
|
@ -37,7 +43,9 @@ EMAIL_HOST = "localhost"
|
|||
EMAIL_PORT = 1025
|
||||
{%- else -%}
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
|
||||
EMAIL_BACKEND = env("DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.console.EmailBackend")
|
||||
EMAIL_BACKEND = env(
|
||||
"DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.console.EmailBackend",
|
||||
)
|
||||
{%- endif %}
|
||||
|
||||
{%- if cookiecutter.use_whitenoise == 'y' %}
|
||||
|
@ -45,18 +53,23 @@ EMAIL_BACKEND = env("DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.c
|
|||
# WhiteNoise
|
||||
# ------------------------------------------------------------------------------
|
||||
# http://whitenoise.evans.io/en/latest/django.html#using-whitenoise-in-development
|
||||
INSTALLED_APPS = ["whitenoise.runserver_nostatic"] + INSTALLED_APPS # noqa: F405
|
||||
INSTALLED_APPS = ["whitenoise.runserver_nostatic", *INSTALLED_APPS]
|
||||
{% endif %}
|
||||
|
||||
# django-debug-toolbar
|
||||
# ------------------------------------------------------------------------------
|
||||
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#prerequisites
|
||||
INSTALLED_APPS += ["debug_toolbar"] # noqa: F405
|
||||
INSTALLED_APPS += ["debug_toolbar"]
|
||||
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#middleware
|
||||
MIDDLEWARE += ["debug_toolbar.middleware.DebugToolbarMiddleware"] # noqa: F405
|
||||
MIDDLEWARE += ["debug_toolbar.middleware.DebugToolbarMiddleware"]
|
||||
# https://django-debug-toolbar.readthedocs.io/en/latest/configuration.html#debug-toolbar-config
|
||||
DEBUG_TOOLBAR_CONFIG = {
|
||||
"DISABLE_PANELS": ["debug_toolbar.panels.redirects.RedirectsPanel"],
|
||||
"DISABLE_PANELS": [
|
||||
"debug_toolbar.panels.redirects.RedirectsPanel",
|
||||
# Disable profiling panel due to an issue with Python 3.12:
|
||||
# https://github.com/jazzband/django-debug-toolbar/issues/1875
|
||||
"debug_toolbar.panels.profiling.ProfilingPanel",
|
||||
],
|
||||
"SHOW_TEMPLATE_CONTEXT": True,
|
||||
}
|
||||
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#internal-ips
|
||||
|
@ -75,12 +88,21 @@ if env("USE_DOCKER") == "yes":
|
|||
# The node container isn't started (yet?)
|
||||
pass
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.windows == 'y' %}
|
||||
# RunServerPlus
|
||||
# ------------------------------------------------------------------------------
|
||||
# This is a custom setting for RunServerPlus to fix reloader issue in Windows docker environment
|
||||
# Werkzeug reloader type [auto, watchdog, or stat]
|
||||
RUNSERVERPLUS_POLLER_RELOADER_TYPE = 'stat'
|
||||
# If you have CPU and IO load issues, you can increase this poller interval e.g) 5
|
||||
RUNSERVERPLUS_POLLER_RELOADER_INTERVAL = 1
|
||||
{%- endif %}
|
||||
{%- endif %}
|
||||
|
||||
# django-extensions
|
||||
# ------------------------------------------------------------------------------
|
||||
# https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration
|
||||
INSTALLED_APPS += ["django_extensions"] # noqa: F405
|
||||
INSTALLED_APPS += ["django_extensions"]
|
||||
{% if cookiecutter.use_celery == 'y' -%}
|
||||
|
||||
# Celery
|
||||
|
@ -96,7 +118,7 @@ CELERY_TASK_EAGER_PROPAGATES = True
|
|||
{%- if cookiecutter.frontend_pipeline == 'Webpack' %}
|
||||
# django-webpack-loader
|
||||
# ------------------------------------------------------------------------------
|
||||
WEBPACK_LOADER["DEFAULT"]["CACHE"] = not DEBUG # noqa: F405
|
||||
WEBPACK_LOADER["DEFAULT"]["CACHE"] = not DEBUG
|
||||
|
||||
{%- endif %}
|
||||
# Your stuff...
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
# ruff: noqa: E501
|
||||
{% if cookiecutter.use_sentry == 'y' -%}
|
||||
import logging
|
||||
|
||||
|
@ -12,7 +13,12 @@ from sentry_sdk.integrations.logging import LoggingIntegration
|
|||
from sentry_sdk.integrations.redis import RedisIntegration
|
||||
|
||||
{% endif -%}
|
||||
from .base import * # noqa
|
||||
from .base import * # noqa: F403
|
||||
from .base import DATABASES
|
||||
from .base import INSTALLED_APPS
|
||||
{%- if cookiecutter.use_drf == "y" %}
|
||||
from .base import SPECTACULAR_SETTINGS
|
||||
{%- endif %}
|
||||
from .base import env
|
||||
|
||||
# GENERAL
|
||||
|
@ -24,7 +30,7 @@ ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["{{ cookiecutter.domai
|
|||
|
||||
# DATABASES
|
||||
# ------------------------------------------------------------------------------
|
||||
DATABASES["default"]["CONN_MAX_AGE"] = env.int("CONN_MAX_AGE", default=60) # noqa: F405
|
||||
DATABASES["default"]["CONN_MAX_AGE"] = env.int("CONN_MAX_AGE", default=60)
|
||||
|
||||
# CACHES
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -38,7 +44,7 @@ CACHES = {
|
|||
# https://github.com/jazzband/django-redis#memcached-exceptions-behavior
|
||||
"IGNORE_EXCEPTIONS": True,
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
# SECURITY
|
||||
|
@ -49,24 +55,34 @@ SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
|
|||
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-secure
|
||||
SESSION_COOKIE_SECURE = True
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-name
|
||||
SESSION_COOKIE_NAME = "__Secure-sessionid"
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-secure
|
||||
CSRF_COOKIE_SECURE = True
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-name
|
||||
CSRF_COOKIE_NAME = "__Secure-csrftoken"
|
||||
# https://docs.djangoproject.com/en/dev/topics/security/#ssl-https
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-seconds
|
||||
# TODO: set this to 60 seconds first and then to 518400 once you prove the former works
|
||||
SECURE_HSTS_SECONDS = 60
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-include-subdomains
|
||||
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool("DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True)
|
||||
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
|
||||
"DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS",
|
||||
default=True,
|
||||
)
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-preload
|
||||
SECURE_HSTS_PRELOAD = env.bool("DJANGO_SECURE_HSTS_PRELOAD", default=True)
|
||||
# https://docs.djangoproject.com/en/dev/ref/middleware/#x-content-type-options-nosniff
|
||||
SECURE_CONTENT_TYPE_NOSNIFF = env.bool("DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True)
|
||||
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
|
||||
"DJANGO_SECURE_CONTENT_TYPE_NOSNIFF",
|
||||
default=True,
|
||||
)
|
||||
|
||||
{% if cookiecutter.cloud_provider != 'None' -%}
|
||||
# STORAGES
|
||||
# ------------------------------------------------------------------------------
|
||||
# https://django-storages.readthedocs.io/en/latest/#installation
|
||||
INSTALLED_APPS += ["storages"] # noqa: F405
|
||||
INSTALLED_APPS += ["storages"]
|
||||
{%- endif -%}
|
||||
{% if cookiecutter.cloud_provider == 'AWS' %}
|
||||
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
|
||||
|
@ -103,35 +119,99 @@ AZURE_CONTAINER = env("DJANGO_AZURE_CONTAINER_NAME")
|
|||
{% endif -%}
|
||||
|
||||
{% if cookiecutter.cloud_provider != 'None' or cookiecutter.use_whitenoise == 'y' -%}
|
||||
# STATIC
|
||||
# STATIC & MEDIA
|
||||
# ------------------------
|
||||
{% endif -%}
|
||||
{% if cookiecutter.use_whitenoise == 'y' -%}
|
||||
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
|
||||
{% elif cookiecutter.cloud_provider == 'AWS' -%}
|
||||
STATICFILES_STORAGE = "{{cookiecutter.project_slug}}.utils.storages.StaticS3Storage"
|
||||
STORAGES = {
|
||||
{%- if cookiecutter.use_whitenoise == 'y' and cookiecutter.cloud_provider == 'None' %}
|
||||
"default": {
|
||||
"BACKEND": "django.core.files.storage.FileSystemStorage",
|
||||
},
|
||||
"staticfiles": {
|
||||
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
|
||||
},
|
||||
{%- elif cookiecutter.cloud_provider == 'AWS' %}
|
||||
"default": {
|
||||
"BACKEND": "storages.backends.s3.S3Storage",
|
||||
"OPTIONS": {
|
||||
"location": "media",
|
||||
"file_overwrite": False,
|
||||
},
|
||||
},
|
||||
{%- if cookiecutter.use_whitenoise == 'y' %}
|
||||
"staticfiles": {
|
||||
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
|
||||
},
|
||||
{%- else %}
|
||||
"staticfiles": {
|
||||
"BACKEND": "storages.backends.s3.S3Storage",
|
||||
"OPTIONS": {
|
||||
"location": "static",
|
||||
"default_acl": "public-read",
|
||||
},
|
||||
},
|
||||
{%- endif %}
|
||||
{%- elif cookiecutter.cloud_provider == 'GCP' %}
|
||||
"default": {
|
||||
"BACKEND": "storages.backends.gcloud.GoogleCloudStorage",
|
||||
"OPTIONS": {
|
||||
"location": "media",
|
||||
"file_overwrite": False,
|
||||
},
|
||||
},
|
||||
{%- if cookiecutter.use_whitenoise == 'y' %}
|
||||
"staticfiles": {
|
||||
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
|
||||
},
|
||||
{%- else %}
|
||||
"staticfiles": {
|
||||
"BACKEND": "storages.backends.gcloud.GoogleCloudStorage",
|
||||
"OPTIONS": {
|
||||
"location": "static",
|
||||
"default_acl": "publicRead",
|
||||
},
|
||||
},
|
||||
{%- endif %}
|
||||
{%- elif cookiecutter.cloud_provider == 'Azure' %}
|
||||
"default": {
|
||||
"BACKEND": "storages.backends.azure_storage.AzureStorage",
|
||||
"OPTIONS": {
|
||||
"location": "media",
|
||||
"file_overwrite": False,
|
||||
},
|
||||
},
|
||||
{%- if cookiecutter.use_whitenoise == 'y' %}
|
||||
"staticfiles": {
|
||||
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
|
||||
},
|
||||
{%- else %}
|
||||
"staticfiles": {
|
||||
"BACKEND": "storages.backends.azure_storage.AzureStorage",
|
||||
"OPTIONS": {
|
||||
"location": "static",
|
||||
},
|
||||
},
|
||||
{%- endif %}
|
||||
{%- endif %}
|
||||
}
|
||||
{%- endif %}
|
||||
|
||||
{%- if cookiecutter.cloud_provider == 'AWS' %}
|
||||
MEDIA_URL = f"https://{aws_s3_domain}/media/"
|
||||
{%- if cookiecutter.use_whitenoise == 'n' %}
|
||||
COLLECTFAST_STRATEGY = "collectfast.strategies.boto3.Boto3Strategy"
|
||||
STATIC_URL = f"https://{aws_s3_domain}/static/"
|
||||
{% elif cookiecutter.cloud_provider == 'GCP' -%}
|
||||
STATICFILES_STORAGE = "{{cookiecutter.project_slug}}.utils.storages.StaticGoogleCloudStorage"
|
||||
{%- endif %}
|
||||
{%- elif cookiecutter.cloud_provider == 'GCP' %}
|
||||
MEDIA_URL = f"https://storage.googleapis.com/{GS_BUCKET_NAME}/media/"
|
||||
{%- if cookiecutter.use_whitenoise == 'n' %}
|
||||
COLLECTFAST_STRATEGY = "collectfast.strategies.gcloud.GoogleCloudStrategy"
|
||||
STATIC_URL = f"https://storage.googleapis.com/{GS_BUCKET_NAME}/static/"
|
||||
{% elif cookiecutter.cloud_provider == 'Azure' -%}
|
||||
STATICFILES_STORAGE = "{{cookiecutter.project_slug}}.utils.storages.StaticAzureStorage"
|
||||
STATIC_URL = f"https://{AZURE_ACCOUNT_NAME}.blob.core.windows.net/static/"
|
||||
{% endif -%}
|
||||
|
||||
# MEDIA
|
||||
# ------------------------------------------------------------------------------
|
||||
{%- if cookiecutter.cloud_provider == 'AWS' %}
|
||||
DEFAULT_FILE_STORAGE = "{{cookiecutter.project_slug}}.utils.storages.MediaS3Storage"
|
||||
MEDIA_URL = f"https://{aws_s3_domain}/media/"
|
||||
{%- elif cookiecutter.cloud_provider == 'GCP' %}
|
||||
DEFAULT_FILE_STORAGE = "{{cookiecutter.project_slug}}.utils.storages.MediaGoogleCloudStorage"
|
||||
MEDIA_URL = f"https://storage.googleapis.com/{GS_BUCKET_NAME}/media/"
|
||||
{%- endif %}
|
||||
{%- elif cookiecutter.cloud_provider == 'Azure' %}
|
||||
DEFAULT_FILE_STORAGE = "{{cookiecutter.project_slug}}.utils.storages.MediaAzureStorage"
|
||||
MEDIA_URL = f"https://{AZURE_ACCOUNT_NAME}.blob.core.windows.net/media/"
|
||||
{%- if cookiecutter.use_whitenoise == 'n' %}
|
||||
STATIC_URL = f"https://{AZURE_ACCOUNT_NAME}.blob.core.windows.net/static/"
|
||||
{%- endif %}
|
||||
{%- endif %}
|
||||
|
||||
# EMAIL
|
||||
|
@ -157,7 +237,7 @@ ADMIN_URL = env("DJANGO_ADMIN_URL")
|
|||
# Anymail
|
||||
# ------------------------------------------------------------------------------
|
||||
# https://anymail.readthedocs.io/en/stable/installation/#installing-anymail
|
||||
INSTALLED_APPS += ["anymail"] # noqa: F405
|
||||
INSTALLED_APPS += ["anymail"]
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
|
||||
# https://anymail.readthedocs.io/en/stable/installation/#anymail-settings-reference
|
||||
{%- if cookiecutter.mail_service == 'Mailgun' %}
|
||||
|
@ -200,12 +280,12 @@ ANYMAIL = {
|
|||
"SENDGRID_API_KEY": env("SENDGRID_API_KEY"),
|
||||
"SENDGRID_API_URL": env("SENDGRID_API_URL", default="https://api.sendgrid.com/v3/"),
|
||||
}
|
||||
{%- elif cookiecutter.mail_service == 'SendinBlue' %}
|
||||
# https://anymail.readthedocs.io/en/stable/esps/sendinblue/
|
||||
EMAIL_BACKEND = "anymail.backends.sendinblue.EmailBackend"
|
||||
{%- elif cookiecutter.mail_service == 'Brevo' %}
|
||||
# https://anymail.readthedocs.io/en/stable/esps/brevo/
|
||||
EMAIL_BACKEND = "anymail.backends.brevo.EmailBackend"
|
||||
ANYMAIL = {
|
||||
"SENDINBLUE_API_KEY": env("SENDINBLUE_API_KEY"),
|
||||
"SENDINBLUE_API_URL": env("SENDINBLUE_API_URL", default="https://api.sendinblue.com/v3/"),
|
||||
"BREVO_API_KEY": env("BREVO_API_KEY"),
|
||||
"BREVO_API_URL": env("BREVO_API_URL", default="https://api.brevo.com/v3/"),
|
||||
}
|
||||
{%- elif cookiecutter.mail_service == 'SparkPost' %}
|
||||
# https://anymail.readthedocs.io/en/stable/esps/sparkpost/
|
||||
|
@ -230,10 +310,11 @@ COMPRESS_ENABLED = env.bool("COMPRESS_ENABLED", default=True)
|
|||
COMPRESS_STORAGE = "compressor.storage.GzipCompressorFileStorage"
|
||||
{%- elif cookiecutter.cloud_provider in ('AWS', 'GCP', 'Azure') and cookiecutter.use_whitenoise == 'n' %}
|
||||
# https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_STORAGE
|
||||
COMPRESS_STORAGE = STATICFILES_STORAGE
|
||||
COMPRESS_STORAGE = STORAGES["staticfiles"]["BACKEND"]
|
||||
{%- endif %}
|
||||
# https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_URL
|
||||
COMPRESS_URL = STATIC_URL{% if cookiecutter.use_whitenoise == 'y' or cookiecutter.cloud_provider == 'None' %} # noqa: F405{% endif %}
|
||||
COMPRESS_URL = STATIC_URL{% if cookiecutter.use_whitenoise == 'y' or cookiecutter.cloud_provider == 'None' %} # noqa: F405
|
||||
{%- endif -%}
|
||||
{%- if cookiecutter.use_whitenoise == 'y' %}
|
||||
# https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_OFFLINE
|
||||
COMPRESS_OFFLINE = True # Offline compression is required when using Whitenoise
|
||||
|
@ -251,7 +332,7 @@ COMPRESS_FILTERS = {
|
|||
# Collectfast
|
||||
# ------------------------------------------------------------------------------
|
||||
# https://github.com/antonagestam/collectfast#installation
|
||||
INSTALLED_APPS = ["collectfast"] + INSTALLED_APPS # noqa: F405
|
||||
INSTALLED_APPS = ["collectfast", *INSTALLED_APPS]
|
||||
{% endif %}
|
||||
# LOGGING
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -311,7 +392,7 @@ LOGGING = {
|
|||
"level": "DEBUG",
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": "verbose",
|
||||
}
|
||||
},
|
||||
},
|
||||
"root": {"level": "INFO", "handlers": ["console"]},
|
||||
"loggers": {
|
||||
|
@ -363,7 +444,7 @@ sentry_sdk.init(
|
|||
# django-rest-framework
|
||||
# -------------------------------------------------------------------------------
|
||||
# Tools that generate code samples can use SERVERS to point to the correct domain
|
||||
SPECTACULAR_SETTINGS["SERVERS"] = [ # noqa: F405
|
||||
SPECTACULAR_SETTINGS["SERVERS"] = [
|
||||
{"url": "https://{{ cookiecutter.domain_name }}", "description": "Production server"},
|
||||
]
|
||||
|
||||
|
|
|
@ -2,7 +2,8 @@
|
|||
With these settings, tests run faster.
|
||||
"""
|
||||
|
||||
from .base import * # noqa
|
||||
from .base import * # noqa: F403
|
||||
from .base import TEMPLATES
|
||||
from .base import env
|
||||
|
||||
# GENERAL
|
||||
|
@ -27,17 +28,17 @@ EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
|
|||
|
||||
# DEBUGGING FOR TEMPLATES
|
||||
# ------------------------------------------------------------------------------
|
||||
TEMPLATES[0]["OPTIONS"]["debug"] = True # type: ignore # noqa: F405
|
||||
TEMPLATES[0]["OPTIONS"]["debug"] = True # type: ignore[index]
|
||||
|
||||
# MEDIA
|
||||
# ------------------------------------------------------------------------------
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#media-url
|
||||
MEDIA_URL = 'http://media.testserver'
|
||||
MEDIA_URL = "http://media.testserver"
|
||||
|
||||
{%- if cookiecutter.frontend_pipeline == 'Webpack' %}
|
||||
# django-webpack-loader
|
||||
# ------------------------------------------------------------------------------
|
||||
WEBPACK_LOADER["DEFAULT"]["LOADER_CLASS"] = "webpack_loader.loader.FakeWebpackLoader" # noqa: F405
|
||||
WEBPACK_LOADER["DEFAULT"]["LOADER_CLASS"] = "webpack_loader.loaders.FakeWebpackLoader" # noqa: F405
|
||||
|
||||
{%- endif %}
|
||||
# Your stuff...
|
||||
|
|
|
@ -1,27 +1,37 @@
|
|||
# ruff: noqa
|
||||
from django.conf import settings
|
||||
from django.conf.urls.static import static
|
||||
from django.contrib import admin
|
||||
{%- if cookiecutter.use_async == 'y' %}
|
||||
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
|
||||
{%- endif %}
|
||||
from django.urls import include, path
|
||||
from django.urls import include
|
||||
from django.urls import path
|
||||
from django.views import defaults as default_views
|
||||
from django.views.generic import TemplateView
|
||||
{%- if cookiecutter.use_drf == 'y' %}
|
||||
from drf_spectacular.views import SpectacularAPIView, SpectacularSwaggerView
|
||||
from drf_spectacular.views import SpectacularAPIView
|
||||
from drf_spectacular.views import SpectacularSwaggerView
|
||||
from rest_framework.authtoken.views import obtain_auth_token
|
||||
{%- endif %}
|
||||
|
||||
urlpatterns = [
|
||||
path("", TemplateView.as_view(template_name="pages/home.html"), name="home"),
|
||||
path("about/", TemplateView.as_view(template_name="pages/about.html"), name="about"),
|
||||
path(
|
||||
"about/",
|
||||
TemplateView.as_view(template_name="pages/about.html"),
|
||||
name="about",
|
||||
),
|
||||
# Django Admin, use {% raw %}{% url 'admin:index' %}{% endraw %}
|
||||
path(settings.ADMIN_URL, admin.site.urls),
|
||||
# User management
|
||||
path("users/", include("{{ cookiecutter.project_slug }}.users.urls", namespace="users")),
|
||||
path("accounts/", include("allauth.urls")),
|
||||
# Your stuff: custom urls includes go here
|
||||
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
||||
# ...
|
||||
# Media files
|
||||
*static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT),
|
||||
]
|
||||
{%- if cookiecutter.use_async == 'y' %}
|
||||
if settings.DEBUG:
|
||||
# Static file serving when using Gunicorn + Uvicorn for local web socket development
|
||||
|
@ -33,7 +43,7 @@ urlpatterns += [
|
|||
# API base url
|
||||
path("api/", include("config.api_router")),
|
||||
# DRF auth token
|
||||
path("auth-token/", obtain_auth_token),
|
||||
path("api/auth-token/", obtain_auth_token),
|
||||
path("api/schema/", SpectacularAPIView.as_view(), name="api-schema"),
|
||||
path(
|
||||
"api/docs/",
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
# ruff: noqa
|
||||
"""
|
||||
WSGI config for {{ cookiecutter.project_name }} project.
|
||||
|
||||
|
@ -13,6 +14,7 @@ middleware here, or combine a Django application with an application of another
|
|||
framework.
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
|
16
{{cookiecutter.project_slug}}/docker-compose.docs.yml
Normal file
16
{{cookiecutter.project_slug}}/docker-compose.docs.yml
Normal file
|
@ -0,0 +1,16 @@
|
|||
services:
|
||||
docs:
|
||||
image: {{ cookiecutter.project_slug }}_local_docs
|
||||
container_name: {{ cookiecutter.project_slug }}_local_docs
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./compose/local/docs/Dockerfile
|
||||
env_file:
|
||||
- ./.envs/.local/.django
|
||||
volumes:
|
||||
- ./docs:/docs:z
|
||||
- ./config:/app/config:z
|
||||
- ./{{ cookiecutter.project_slug }}:/app/{{ cookiecutter.project_slug }}:z
|
||||
ports:
|
||||
- '9000:9000'
|
||||
command: /start-docs
|
|
@ -1,8 +1,7 @@
|
|||
version: '3'
|
||||
|
||||
volumes:
|
||||
{{ cookiecutter.project_slug }}_local_postgres_data: {}
|
||||
{{ cookiecutter.project_slug }}_local_postgres_data_backups: {}
|
||||
{% if cookiecutter.use_celery == 'y' %}{{ cookiecutter.project_slug }}_local_redis_data: {}{% endif %}
|
||||
|
||||
services:
|
||||
django:{% if cookiecutter.use_celery == 'y' %} &django{% endif %}
|
||||
|
@ -40,25 +39,10 @@ services:
|
|||
env_file:
|
||||
- ./.envs/.local/.postgres
|
||||
|
||||
docs:
|
||||
image: {{ cookiecutter.project_slug }}_local_docs
|
||||
container_name: {{ cookiecutter.project_slug }}_local_docs
|
||||
build:
|
||||
context: .
|
||||
dockerfile: ./compose/local/docs/Dockerfile
|
||||
env_file:
|
||||
- ./.envs/.local/.django
|
||||
volumes:
|
||||
- ./docs:/docs:z
|
||||
- ./config:/app/config:z
|
||||
- ./{{ cookiecutter.project_slug }}:/app/{{ cookiecutter.project_slug }}:z
|
||||
ports:
|
||||
- '9000:9000'
|
||||
command: /start-docs
|
||||
{%- if cookiecutter.use_mailpit == 'y' %}
|
||||
|
||||
mailpit:
|
||||
image: axllent/mailpit:v1.8
|
||||
image: docker.io/axllent/mailpit:latest
|
||||
container_name: {{ cookiecutter.project_slug }}_local_mailpit
|
||||
ports:
|
||||
- "8025:8025"
|
||||
|
@ -67,8 +51,12 @@ services:
|
|||
{%- if cookiecutter.use_celery == 'y' %}
|
||||
|
||||
redis:
|
||||
image: redis:6
|
||||
image: docker.io/redis:6
|
||||
container_name: {{ cookiecutter.project_slug }}_local_redis
|
||||
{% if cookiecutter.use_celery == 'y' %}
|
||||
volumes:
|
||||
- {{ cookiecutter.project_slug }}_local_redis_data:/data
|
||||
{% endif %}
|
||||
|
||||
celeryworker:
|
||||
<<: *django
|
|
@ -1,5 +1,3 @@
|
|||
version: '3'
|
||||
|
||||
volumes:
|
||||
production_postgres_data: {}
|
||||
production_postgres_data_backups: {}
|
||||
|
@ -7,6 +5,10 @@ volumes:
|
|||
{%- if cookiecutter.cloud_provider == 'None' %}
|
||||
production_django_media: {}
|
||||
{%- endif %}
|
||||
{% if cookiecutter.use_celery == 'y' %}
|
||||
production_redis_data: {}
|
||||
{% endif %}
|
||||
|
||||
|
||||
services:
|
||||
django:{% if cookiecutter.use_celery == 'y' %} &django{% endif %}
|
||||
|
@ -67,7 +69,13 @@ services:
|
|||
{%- endif %}
|
||||
|
||||
redis:
|
||||
image: redis:6
|
||||
image: docker.io/redis:6
|
||||
{% if cookiecutter.use_celery == 'y' %}
|
||||
volumes:
|
||||
- production_redis_data:/data
|
||||
{% endif %}
|
||||
|
||||
|
||||
{%- if cookiecutter.use_celery == 'y' %}
|
||||
|
||||
celeryworker:
|
||||
|
@ -102,7 +110,7 @@ services:
|
|||
build:
|
||||
context: .
|
||||
dockerfile: ./compose/production/nginx/Dockerfile
|
||||
image: {{ cookiecutter.project_slug }}_local_nginx
|
||||
image: {{ cookiecutter.project_slug }}_production_nginx
|
||||
depends_on:
|
||||
- django
|
||||
volumes:
|
|
@ -1,3 +1,4 @@
|
|||
# ruff: noqa
|
||||
# Configuration file for the Sphinx documentation builder.
|
||||
#
|
||||
# This file only contains a selection of the most common options. For a full
|
||||
|
|
|
@ -15,7 +15,7 @@ from inside the `{{cookiecutter.project_slug}}/docs` directory.
|
|||
{% else %}
|
||||
To build and serve docs, use the commands::
|
||||
|
||||
docker compose -f local.yml up docs
|
||||
docker compose -f docker-compose.local.yml up docs
|
||||
|
||||
{% endif %}
|
||||
|
||||
|
@ -26,7 +26,7 @@ Changes to files in `docs/_source` will be picked up and reloaded automatically.
|
|||
Docstrings to Documentation
|
||||
----------------------------------------------------------------------
|
||||
|
||||
The sphinx extension `apidoc <https://www.sphinx-doc.org/en/master/man/sphinx-apidoc.html/>`_ is used to automatically document code using signatures and docstrings.
|
||||
The sphinx extension `apidoc <https://www.sphinx-doc.org/en/master/man/sphinx-apidoc.html>`_ is used to automatically document code using signatures and docstrings.
|
||||
|
||||
Numpy or Google style docstrings will be picked up from project files and available for documentation. See the `Napoleon <https://sphinxcontrib-napoleon.readthedocs.io/en/latest/>`_ extension for details.
|
||||
|
||||
|
|
|
@ -21,7 +21,7 @@ Next, you have to add new remote python interpreter, based on already tested dep
|
|||
|
||||
.. image:: images/3.png
|
||||
|
||||
Switch to *Docker Compose* and select `local.yml` file from directory of your project, next set *Service name* to `django`
|
||||
Switch to *Docker Compose* and select `docker-compose.local.yml` file from directory of your project, next set *Service name* to `django`
|
||||
|
||||
.. image:: images/4.png
|
||||
|
||||
|
|
|
@ -106,7 +106,7 @@ function imgCompression() {
|
|||
function asyncRunServer() {
|
||||
const cmd = spawn(
|
||||
'gunicorn',
|
||||
['config.asgi', '-k', 'uvicorn.workers.UvicornWorker', '--reload'],
|
||||
['config.asgi', '-k', 'uvicorn_worker.UvicornWorker', '--reload'],
|
||||
{stdio: 'inherit'},
|
||||
);
|
||||
cmd.on('close', function (code) {
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
Start by configuring the `LANGUAGES` settings in `base.py`, by uncommenting languages you are willing to support. Then, translations strings will be placed in this folder when running:
|
||||
|
||||
```bash
|
||||
{% if cookiecutter.use_docker == 'y' %}docker compose -f local.yml run --rm django {% endif %}python manage.py makemessages -all --no-location
|
||||
{% if cookiecutter.use_docker == 'y' %}docker compose -f docker-compose.local.yml run --rm django {% endif %}python manage.py makemessages -all --no-location
|
||||
```
|
||||
|
||||
This should generate `django.po` (stands for Portable Object) files under each locale `<locale name>/LC_MESSAGES/django.po`. Each translatable string in the codebase is collected with its `msgid` and need to be translated as `msgstr`, for example:
|
||||
|
@ -16,7 +16,7 @@ msgstr "utilisateurs"
|
|||
Once all translations are done, they need to be compiled into `.mo` files (stands for Machine Object), which are the actual binary files used by the application:
|
||||
|
||||
```bash
|
||||
{% if cookiecutter.use_docker == 'y' %}docker compose -f local.yml run --rm django {% endif %}python manage.py compilemessages
|
||||
{% if cookiecutter.use_docker == 'y' %}docker compose -f docker-compose.local.yml run --rm django {% endif %}python manage.py compilemessages
|
||||
```
|
||||
|
||||
Note that the `.po` files are NOT used by the application directly, so if the `.mo` files are out of dates, the content won't appear as translated even if the `.po` files are up-to-date.
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
#!/usr/bin/env python
|
||||
# ruff: noqa
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
@ -13,7 +14,7 @@ if __name__ == "__main__":
|
|||
# issue is really that Django is missing to avoid masking other
|
||||
# exceptions on Python 2.
|
||||
try:
|
||||
import django # noqa
|
||||
import django
|
||||
except ImportError:
|
||||
raise ImportError(
|
||||
"Couldn't import Django. Are you sure it's installed and "
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
# ruff: noqa
|
||||
import os
|
||||
from collections.abc import Sequence
|
||||
from pathlib import Path
|
||||
|
|
|
@ -8,15 +8,15 @@
|
|||
"autoprefixer": "^10.4.0",
|
||||
"babel-loader": "^9.1.2",
|
||||
"bootstrap": "^5.2.3",
|
||||
"browser-sync": "^2.27.7",
|
||||
"browser-sync": "^3.0.2",
|
||||
"css-loader": "^6.5.1",
|
||||
"gulp-concat": "^2.6.1",
|
||||
"concurrently": "^8.0.1",
|
||||
"cssnano": "^6.0.0",
|
||||
"cssnano": "^7.0.0",
|
||||
"gulp": "^4.0.2",
|
||||
"gulp-imagemin": "^7.1.0",
|
||||
"gulp-plumber": "^1.2.1",
|
||||
"gulp-postcss": "^9.0.1",
|
||||
"gulp-postcss": "^10.0.0",
|
||||
"gulp-rename": "^2.0.0",
|
||||
"gulp-sass": "^5.0.0",
|
||||
"gulp-uglify-es": "^3.0.0",
|
||||
|
@ -24,18 +24,18 @@
|
|||
"node-sass-tilde-importer": "^1.0.2",
|
||||
"pixrem": "^5.0.0",
|
||||
"postcss": "^8.3.11",
|
||||
"postcss-loader": "^7.0.2",
|
||||
"postcss-loader": "^8.0.0",
|
||||
"postcss-preset-env": "^9.0.0",
|
||||
"sass": "^1.43.4",
|
||||
"sass-loader": "^13.2.0",
|
||||
"sass-loader": "^14.0.0",
|
||||
"webpack": "^5.65.0",
|
||||
"webpack-bundle-tracker": "^2.0.0",
|
||||
"webpack-bundle-tracker": "^3.0.1",
|
||||
"webpack-cli": "^5.0.1",
|
||||
"webpack-dev-server": "^4.6.0",
|
||||
"webpack-dev-server": "^5.0.2",
|
||||
"webpack-merge": "^5.8.0"
|
||||
},
|
||||
"engines": {
|
||||
"node": "18"
|
||||
"node": "20"
|
||||
},
|
||||
"browserslist": [
|
||||
"last 2 versions"
|
||||
|
|
|
@ -1,7 +1,7 @@
|
|||
# ==== pytest ====
|
||||
[tool.pytest.ini_options]
|
||||
minversion = "6.0"
|
||||
addopts = "--ds=config.settings.test --reuse-db"
|
||||
addopts = "--ds=config.settings.test --reuse-db --import-mode=importlib"
|
||||
python_files = [
|
||||
"tests.py",
|
||||
"test_*.py",
|
||||
|
@ -16,28 +16,9 @@ include = ["{{cookiecutter.project_slug}}/**"]
|
|||
omit = ["*/migrations/*", "*/tests/*"]
|
||||
plugins = ["django_coverage_plugin"]
|
||||
|
||||
|
||||
# ==== black ====
|
||||
[tool.black]
|
||||
line-length = 119
|
||||
target-version = ['py311']
|
||||
|
||||
|
||||
# ==== isort ====
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
line_length = 119
|
||||
known_first_party = [
|
||||
"{{cookiecutter.project_slug}}",
|
||||
"config",
|
||||
]
|
||||
skip = ["venv/"]
|
||||
skip_glob = ["**/migrations/*.py"]
|
||||
|
||||
|
||||
# ==== mypy ====
|
||||
[tool.mypy]
|
||||
python_version = "3.11"
|
||||
python_version = "3.12"
|
||||
check_untyped_defs = true
|
||||
ignore_missing_imports = true
|
||||
warn_unused_ignores = true
|
||||
|
@ -58,47 +39,13 @@ ignore_errors = true
|
|||
[tool.django-stubs]
|
||||
django_settings_module = "config.settings.test"
|
||||
|
||||
|
||||
# ==== PyLint ====
|
||||
[tool.pylint.MASTER]
|
||||
load-plugins = [
|
||||
"pylint_django",
|
||||
{%- if cookiecutter.use_celery == "y" %}
|
||||
"pylint_celery",
|
||||
{%- endif %}
|
||||
]
|
||||
django-settings-module = "config.settings.local"
|
||||
|
||||
[tool.pylint.FORMAT]
|
||||
max-line-length = 119
|
||||
|
||||
[tool.pylint."MESSAGES CONTROL"]
|
||||
disable = [
|
||||
"missing-docstring",
|
||||
"invalid-name",
|
||||
]
|
||||
|
||||
[tool.pylint.DESIGN]
|
||||
max-parents = 13
|
||||
|
||||
[tool.pylint.TYPECHECK]
|
||||
generated-members = [
|
||||
"REQUEST",
|
||||
"acl_users",
|
||||
"aq_parent",
|
||||
"[a-zA-Z]+_set{1,2}",
|
||||
"save",
|
||||
"delete",
|
||||
]
|
||||
|
||||
|
||||
# ==== djLint ====
|
||||
[tool.djlint]
|
||||
blank_line_after_tag = "load,extends"
|
||||
close_void_tags = true
|
||||
format_css = true
|
||||
format_js = true
|
||||
# TODO: remove T002 when fixed https://github.com/Riverside-Healthcare/djLint/issues/687
|
||||
# TODO: remove T002 when fixed https://github.com/djlint/djLint/issues/687
|
||||
ignore = "H006,H030,H031,T002"
|
||||
include = "H017,H035"
|
||||
indent = 2
|
||||
|
@ -110,3 +57,121 @@ indent_size = 2
|
|||
|
||||
[tool.djlint.js]
|
||||
indent_size = 2
|
||||
|
||||
[tool.ruff]
|
||||
# Exclude a variety of commonly ignored directories.
|
||||
exclude = [
|
||||
".bzr",
|
||||
".direnv",
|
||||
".eggs",
|
||||
".git",
|
||||
".git-rewrite",
|
||||
".hg",
|
||||
".mypy_cache",
|
||||
".nox",
|
||||
".pants.d",
|
||||
".pytype",
|
||||
".ruff_cache",
|
||||
".svn",
|
||||
".tox",
|
||||
".venv",
|
||||
"__pypackages__",
|
||||
"_build",
|
||||
"buck-out",
|
||||
"build",
|
||||
"dist",
|
||||
"node_modules",
|
||||
"venv",
|
||||
"*/migrations/*.py",
|
||||
"staticfiles/*"
|
||||
]
|
||||
# Same as Django: https://github.com/cookiecutter/cookiecutter-django/issues/4792.
|
||||
line-length = 88
|
||||
indent-width = 4
|
||||
target-version = "py312"
|
||||
|
||||
[tool.ruff.lint]
|
||||
select = [
|
||||
"F",
|
||||
"E",
|
||||
"W",
|
||||
"C90",
|
||||
"I",
|
||||
"N",
|
||||
"UP",
|
||||
"YTT",
|
||||
# "ANN", # flake8-annotations: we should support this in the future but 100+ errors atm
|
||||
"ASYNC",
|
||||
"S",
|
||||
"BLE",
|
||||
"FBT",
|
||||
"B",
|
||||
"A",
|
||||
"COM",
|
||||
"C4",
|
||||
"DTZ",
|
||||
"T10",
|
||||
"DJ",
|
||||
"EM",
|
||||
"EXE",
|
||||
"FA",
|
||||
'ISC',
|
||||
"ICN",
|
||||
"G",
|
||||
'INP',
|
||||
'PIE',
|
||||
"T20",
|
||||
'PYI',
|
||||
'PT',
|
||||
"Q",
|
||||
"RSE",
|
||||
"RET",
|
||||
"SLF",
|
||||
"SLOT",
|
||||
"SIM",
|
||||
"TID",
|
||||
"TCH",
|
||||
"INT",
|
||||
# "ARG", # Unused function argument
|
||||
"PTH",
|
||||
"ERA",
|
||||
"PD",
|
||||
"PGH",
|
||||
"PL",
|
||||
"TRY",
|
||||
"FLY",
|
||||
# "NPY",
|
||||
# "AIR",
|
||||
"PERF",
|
||||
# "FURB",
|
||||
# "LOG",
|
||||
"RUF"
|
||||
]
|
||||
ignore = [
|
||||
"S101", # Use of assert detected https://docs.astral.sh/ruff/rules/assert/
|
||||
"RUF012", # Mutable class attributes should be annotated with `typing.ClassVar`
|
||||
"SIM102", # sometimes it's better to nest
|
||||
"UP038" # Checks for uses of isinstance/issubclass that take a tuple
|
||||
# of types for comparison.
|
||||
# Deactivated because it can make the code slow:
|
||||
# https://github.com/astral-sh/ruff/issues/7871
|
||||
]
|
||||
# Allow fix for all enabled rules (when `--fix`) is provided.
|
||||
fixable = ["ALL"]
|
||||
unfixable = []
|
||||
# The fixes in extend-unsafe-fixes will require
|
||||
# provide the `--unsafe-fixes` flag when fixing.
|
||||
extend-unsafe-fixes = [
|
||||
"UP038"
|
||||
]
|
||||
# Allow unused variables when underscore-prefixed.
|
||||
dummy-variable-rgx = "^(_+|(_+[a-zA-Z0-9_]*[a-zA-Z0-9]+?))$"
|
||||
|
||||
[tool.ruff.format]
|
||||
quote-style = "double"
|
||||
indent-style = "space"
|
||||
skip-magic-trailing-comma = false
|
||||
line-ending = "auto"
|
||||
|
||||
[tool.ruff.lint.isort]
|
||||
force-single-line = true
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
python-slugify==8.0.1 # https://github.com/un33k/python-slugify
|
||||
Pillow==10.0.1 # https://github.com/python-pillow/Pillow
|
||||
python-slugify==8.0.4 # https://github.com/un33k/python-slugify
|
||||
Pillow==10.3.0 # https://github.com/python-pillow/Pillow
|
||||
{%- if cookiecutter.frontend_pipeline == 'Django Compressor' %}
|
||||
{%- if cookiecutter.windows == 'y' and cookiecutter.use_docker == 'n' %}
|
||||
rcssmin==1.1.2 --install-option="--without-c-extensions" # https://github.com/ndparker/rcssmin
|
||||
|
@ -9,42 +9,43 @@ rcssmin==1.1.2 # https://github.com/ndparker/rcssmin
|
|||
{%- endif %}
|
||||
argon2-cffi==23.1.0 # https://github.com/hynek/argon2_cffi
|
||||
{%- if cookiecutter.use_whitenoise == 'y' %}
|
||||
whitenoise==6.5.0 # https://github.com/evansd/whitenoise
|
||||
whitenoise==6.6.0 # https://github.com/evansd/whitenoise
|
||||
{%- endif %}
|
||||
redis==5.0.1 # https://github.com/redis/redis-py
|
||||
redis==5.0.6 # https://github.com/redis/redis-py
|
||||
{%- if cookiecutter.use_docker == "y" or cookiecutter.windows == "n" %}
|
||||
hiredis==2.2.3 # https://github.com/redis/hiredis-py
|
||||
hiredis==2.3.2 # https://github.com/redis/hiredis-py
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.use_celery == "y" %}
|
||||
celery==5.3.4 # pyup: < 6.0 # https://github.com/celery/celery
|
||||
django-celery-beat==2.5.0 # https://github.com/celery/django-celery-beat
|
||||
celery==5.4.0 # pyup: < 6.0 # https://github.com/celery/celery
|
||||
django-celery-beat==2.6.0 # https://github.com/celery/django-celery-beat
|
||||
{%- if cookiecutter.use_docker == 'y' %}
|
||||
flower==2.0.1 # https://github.com/mher/flower
|
||||
{%- endif %}
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.use_async == 'y' %}
|
||||
uvicorn[standard]==0.23.2 # https://github.com/encode/uvicorn
|
||||
uvicorn[standard]==0.30.1 # https://github.com/encode/uvicorn
|
||||
uvicorn-worker==0.2.0 # https://github.com/Kludex/uvicorn-worker
|
||||
{%- endif %}
|
||||
|
||||
# Django
|
||||
# ------------------------------------------------------------------------------
|
||||
django==4.2.5 # pyup: < 5.0 # https://www.djangoproject.com/
|
||||
django==4.2.13 # pyup: < 5.0 # https://www.djangoproject.com/
|
||||
django-environ==0.11.2 # https://github.com/joke2k/django-environ
|
||||
django-model-utils==4.3.1 # https://github.com/jazzband/django-model-utils
|
||||
django-allauth==0.57.0 # https://github.com/pennersr/django-allauth
|
||||
django-crispy-forms==2.0 # https://github.com/django-crispy-forms/django-crispy-forms
|
||||
crispy-bootstrap5==0.7 # https://github.com/django-crispy-forms/crispy-bootstrap5
|
||||
django-model-utils==4.5.1 # https://github.com/jazzband/django-model-utils
|
||||
django-allauth[mfa]==0.63.3 # https://github.com/pennersr/django-allauth
|
||||
django-crispy-forms==2.2 # https://github.com/django-crispy-forms/django-crispy-forms
|
||||
crispy-bootstrap5==2024.2 # https://github.com/django-crispy-forms/crispy-bootstrap5
|
||||
{%- if cookiecutter.frontend_pipeline == 'Django Compressor' %}
|
||||
django-compressor==4.4 # https://github.com/django-compressor/django-compressor
|
||||
{%- endif %}
|
||||
django-redis==5.3.0 # https://github.com/jazzband/django-redis
|
||||
django-redis==5.4.0 # https://github.com/jazzband/django-redis
|
||||
{%- if cookiecutter.use_drf == 'y' %}
|
||||
# Django REST Framework
|
||||
djangorestframework==3.14.0 # https://github.com/encode/django-rest-framework
|
||||
django-cors-headers==4.2.0 # https://github.com/adamchainz/django-cors-headers
|
||||
djangorestframework==3.15.1 # https://github.com/encode/django-rest-framework
|
||||
django-cors-headers==4.3.1 # https://github.com/adamchainz/django-cors-headers
|
||||
# DRF-spectacular for api documentation
|
||||
drf-spectacular==0.26.5 # https://github.com/tfranzel/drf-spectacular
|
||||
drf-spectacular==0.27.2 # https://github.com/tfranzel/drf-spectacular
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.frontend_pipeline == 'Webpack' %}
|
||||
django-webpack-loader==2.0.1 # https://github.com/django-webpack/django-webpack-loader
|
||||
django-webpack-loader==3.1.0 # https://github.com/django-webpack/django-webpack-loader
|
||||
{%- endif %}
|
||||
|
|
|
@ -1,49 +1,43 @@
|
|||
-r base.txt
|
||||
-r production.txt
|
||||
|
||||
Werkzeug[watchdog]==2.3.7 # https://github.com/pallets/werkzeug
|
||||
Werkzeug[watchdog]==3.0.3 # https://github.com/pallets/werkzeug
|
||||
ipdb==0.13.13 # https://github.com/gotcha/ipdb
|
||||
{%- if cookiecutter.use_docker == 'y' %}
|
||||
psycopg[c]==3.1.12 # https://github.com/psycopg/psycopg
|
||||
psycopg[c]==3.1.19 # https://github.com/psycopg/psycopg
|
||||
{%- else %}
|
||||
psycopg[binary]==3.1.12 # https://github.com/psycopg/psycopg
|
||||
psycopg[binary]==3.1.19 # https://github.com/psycopg/psycopg
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.use_async == 'y' or cookiecutter.use_celery == 'y' %}
|
||||
watchfiles==0.20.0 # https://github.com/samuelcolvin/watchfiles
|
||||
watchfiles==0.22.0 # https://github.com/samuelcolvin/watchfiles
|
||||
{%- endif %}
|
||||
|
||||
# Testing
|
||||
# ------------------------------------------------------------------------------
|
||||
mypy==1.4.1 # https://github.com/python/mypy
|
||||
django-stubs[compatible-mypy]==4.2.3 # https://github.com/typeddjango/django-stubs
|
||||
pytest==7.4.2 # https://github.com/pytest-dev/pytest
|
||||
pytest-sugar==0.9.7 # https://github.com/Frozenball/pytest-sugar
|
||||
mypy==1.10.0 # https://github.com/python/mypy
|
||||
django-stubs[compatible-mypy]==5.0.2 # https://github.com/typeddjango/django-stubs
|
||||
pytest==8.2.2 # https://github.com/pytest-dev/pytest
|
||||
pytest-sugar==1.0.0 # https://github.com/Frozenball/pytest-sugar
|
||||
{%- if cookiecutter.use_drf == "y" %}
|
||||
djangorestframework-stubs[compatible-mypy]==3.14.2 # https://github.com/typeddjango/djangorestframework-stubs
|
||||
djangorestframework-stubs[compatible-mypy]==3.15.0 # https://github.com/typeddjango/djangorestframework-stubs
|
||||
{%- endif %}
|
||||
|
||||
# Documentation
|
||||
# ------------------------------------------------------------------------------
|
||||
sphinx==7.2.6 # https://github.com/sphinx-doc/sphinx
|
||||
sphinx-autobuild==2021.3.14 # https://github.com/GaretJax/sphinx-autobuild
|
||||
sphinx==7.3.7 # https://github.com/sphinx-doc/sphinx
|
||||
sphinx-autobuild==2024.4.16 # https://github.com/GaretJax/sphinx-autobuild
|
||||
|
||||
# Code quality
|
||||
# ------------------------------------------------------------------------------
|
||||
flake8==6.1.0 # https://github.com/PyCQA/flake8
|
||||
flake8-isort==6.1.0 # https://github.com/gforcada/flake8-isort
|
||||
coverage==7.3.1 # https://github.com/nedbat/coveragepy
|
||||
black==23.9.1 # https://github.com/psf/black
|
||||
djlint==1.34.0 # https://github.com/Riverside-Healthcare/djLint
|
||||
pylint-django==2.5.3 # https://github.com/PyCQA/pylint-django
|
||||
{%- if cookiecutter.use_celery == 'y' %}
|
||||
pylint-celery==0.3 # https://github.com/PyCQA/pylint-celery
|
||||
{%- endif %}
|
||||
pre-commit==3.4.0 # https://github.com/pre-commit/pre-commit
|
||||
ruff==0.4.9 # https://github.com/astral-sh/ruff
|
||||
coverage==7.5.3 # https://github.com/nedbat/coveragepy
|
||||
djlint==1.34.1 # https://github.com/Riverside-Healthcare/djLint
|
||||
pre-commit==3.7.1 # https://github.com/pre-commit/pre-commit
|
||||
|
||||
# Django
|
||||
# ------------------------------------------------------------------------------
|
||||
factory-boy==3.3.0 # https://github.com/FactoryBoy/factory_boy
|
||||
|
||||
django-debug-toolbar==4.2.0 # https://github.com/jazzband/django-debug-toolbar
|
||||
django-debug-toolbar==4.4.2 # https://github.com/jazzband/django-debug-toolbar
|
||||
django-extensions==3.2.3 # https://github.com/django-extensions/django-extensions
|
||||
django-coverage-plugin==3.1.0 # https://github.com/nedbat/django_coverage_plugin
|
||||
pytest-django==4.5.2 # https://github.com/pytest-dev/pytest-django
|
||||
pytest-django==4.8.0 # https://github.com/pytest-dev/pytest-django
|
||||
|
|
|
@ -2,43 +2,43 @@
|
|||
|
||||
-r base.txt
|
||||
|
||||
gunicorn==21.2.0 # https://github.com/benoitc/gunicorn
|
||||
psycopg[c]==3.1.12 # https://github.com/psycopg/psycopg
|
||||
gunicorn==22.0.0 # https://github.com/benoitc/gunicorn
|
||||
psycopg[c]==3.1.19 # https://github.com/psycopg/psycopg
|
||||
{%- if cookiecutter.use_whitenoise == 'n' %}
|
||||
Collectfast==2.2.0 # https://github.com/antonagestam/collectfast
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.use_sentry == "y" %}
|
||||
sentry-sdk==1.31.0 # https://github.com/getsentry/sentry-python
|
||||
sentry-sdk==2.5.1 # https://github.com/getsentry/sentry-python
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.use_docker == "n" and cookiecutter.windows == "y" %}
|
||||
hiredis==2.2.3 # https://github.com/redis/hiredis-py
|
||||
hiredis==2.3.2 # https://github.com/redis/hiredis-py
|
||||
{%- endif %}
|
||||
|
||||
# Django
|
||||
# ------------------------------------------------------------------------------
|
||||
{%- if cookiecutter.cloud_provider == 'AWS' %}
|
||||
django-storages[s3]==1.14.1 # https://github.com/jschneier/django-storages
|
||||
django-storages[s3]==1.14.3 # https://github.com/jschneier/django-storages
|
||||
{%- elif cookiecutter.cloud_provider == 'GCP' %}
|
||||
django-storages[google]==1.14.1 # https://github.com/jschneier/django-storages
|
||||
django-storages[google]==1.14.3 # https://github.com/jschneier/django-storages
|
||||
{%- elif cookiecutter.cloud_provider == 'Azure' %}
|
||||
django-storages[azure]==1.14.1 # https://github.com/jschneier/django-storages
|
||||
django-storages[azure]==1.14.3 # https://github.com/jschneier/django-storages
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.mail_service == 'Mailgun' %}
|
||||
django-anymail[mailgun]==10.1 # https://github.com/anymail/django-anymail
|
||||
django-anymail[mailgun]==10.3 # https://github.com/anymail/django-anymail
|
||||
{%- elif cookiecutter.mail_service == 'Amazon SES' %}
|
||||
django-anymail[amazon-ses]==10.1 # https://github.com/anymail/django-anymail
|
||||
django-anymail[amazon-ses]==10.3 # https://github.com/anymail/django-anymail
|
||||
{%- elif cookiecutter.mail_service == 'Mailjet' %}
|
||||
django-anymail[mailjet]==10.1 # https://github.com/anymail/django-anymail
|
||||
django-anymail[mailjet]==10.3 # https://github.com/anymail/django-anymail
|
||||
{%- elif cookiecutter.mail_service == 'Mandrill' %}
|
||||
django-anymail[mandrill]==10.1 # https://github.com/anymail/django-anymail
|
||||
django-anymail[mandrill]==10.3 # https://github.com/anymail/django-anymail
|
||||
{%- elif cookiecutter.mail_service == 'Postmark' %}
|
||||
django-anymail[postmark]==10.1 # https://github.com/anymail/django-anymail
|
||||
django-anymail[postmark]==10.3 # https://github.com/anymail/django-anymail
|
||||
{%- elif cookiecutter.mail_service == 'Sendgrid' %}
|
||||
django-anymail[sendgrid]==10.1 # https://github.com/anymail/django-anymail
|
||||
{%- elif cookiecutter.mail_service == 'SendinBlue' %}
|
||||
django-anymail[sendinblue]==10.1 # https://github.com/anymail/django-anymail
|
||||
django-anymail[sendgrid]==10.3 # https://github.com/anymail/django-anymail
|
||||
{%- elif cookiecutter.mail_service == 'Brevo' %}
|
||||
django-anymail[brevo]==10.3 # https://github.com/anymail/django-anymail
|
||||
{%- elif cookiecutter.mail_service == 'SparkPost' %}
|
||||
django-anymail[sparkpost]==10.1 # https://github.com/anymail/django-anymail
|
||||
django-anymail[sparkpost]==10.3 # https://github.com/anymail/django-anymail
|
||||
{%- elif cookiecutter.mail_service == 'Other SMTP' %}
|
||||
django-anymail==10.1 # https://github.com/anymail/django-anymail
|
||||
django-anymail==10.3 # https://github.com/anymail/django-anymail
|
||||
{%- endif %}
|
||||
|
|
|
@ -1 +1 @@
|
|||
python-3.11.6
|
||||
python-3.12.3
|
||||
|
|
|
@ -1,11 +0,0 @@
|
|||
# flake8 and pycodestyle don't support pyproject.toml
|
||||
# https://github.com/PyCQA/flake8/issues/234
|
||||
# https://github.com/PyCQA/pycodestyle/issues/813
|
||||
[flake8]
|
||||
max-line-length = 119
|
||||
exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules,venv,.venv
|
||||
extend-ignore = E203
|
||||
|
||||
[pycodestyle]
|
||||
max-line-length = 119
|
||||
exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules,venv,.venv
|
|
@ -9,7 +9,7 @@ python3-dev
|
|||
##Pillow, pylibmc
|
||||
zlib1g-dev
|
||||
|
||||
##Postgresql and psycopg2 dependencies
|
||||
##Postgresql and psycopg dependencies
|
||||
libpq-dev
|
||||
|
||||
##Pillow dependencies
|
||||
|
|
|
@ -0,0 +1,23 @@
|
|||
##basic build dependencies of various Django apps for Debian Bookworm 12.x
|
||||
#build-essential metapackage install: make, gcc, g++,
|
||||
build-essential
|
||||
#required to translate
|
||||
gettext
|
||||
python3-dev
|
||||
|
||||
##shared dependencies of:
|
||||
##Pillow, pylibmc
|
||||
zlib1g-dev
|
||||
|
||||
##Postgresql and psycopg dependencies
|
||||
libpq-dev
|
||||
|
||||
##Pillow dependencies
|
||||
libtiff5-dev
|
||||
libjpeg62-turbo-dev
|
||||
libfreetype6-dev
|
||||
liblcms2-dev
|
||||
libwebp-dev
|
||||
|
||||
##django-extensions
|
||||
libgraphviz-dev
|
|
@ -9,7 +9,7 @@ python3-dev
|
|||
##Pillow, pylibmc
|
||||
zlib1g-dev
|
||||
|
||||
##Postgresql and psycopg2 dependencies
|
||||
##Postgresql and psycopg dependencies
|
||||
libpq-dev
|
||||
|
||||
##Pillow dependencies
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user