mirror of https://github.com/cookiecutter/cookiecutter-django.git
synced 2025-07-27 00:09:47 +03:00

Compare commits: master ... 2022.04.05
No commits in common. "master" and "2022.04.05" have entirely different histories.
In the diffs below, "-" lines are the master side and "+" lines are the 2022.04.05 side.
.editorconfig
@@ -12,7 +12,7 @@ trim_trailing_whitespace = true
 indent_style = space
 indent_size = 4
 
-[*.{html,css,scss,json,yml,xml,toml}]
+[*.{html,css,scss,json,yml,xml}]
 indent_style = space
 indent_size = 2
 
.flake8 (4 changes)
@@ -1,4 +0,0 @@
-[flake8]
-exclude = docs
-max-line-length = 119
-extend-ignore = E203
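
For context, the deleted config maps directly onto flake8's CLI flags — a minimal sketch, assuming flake8 is installed (e.g. `pip install flake8`):

```bash
# One-off invocation equivalent to the deleted .flake8 settings
flake8 --max-line-length=119 --extend-ignore=E203 --exclude=docs .
```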
.github/CONTRIBUTORS-template.md (10 changes, vendored)
@@ -22,8 +22,8 @@ accept and merge pull requests.
 {%- endfor %}
 </table>
 
-_Audrey is also the creator of Cookiecutter. Audrey and Daniel are on
-the Cookiecutter core team._
+*Audrey is also the creator of Cookiecutter. Audrey and Daniel are on
+the Cookiecutter core team.*
 
 ## Other Contributors
 
@@ -51,6 +51,6 @@ Listed in alphabetical order.
 The following haven't provided code directly, but have provided
 guidance and advice.
 
 - Jannis Leidel
 - Nate Aune
 - Barry Morrison
.github/FUNDING.yml (11 changes, vendored)
@@ -1,5 +1,12 @@
 # These are supported funding model platforms
 
-github: [pydanny, browniebroke, luzfcb]
+github: [pydanny, browniebroke]
 patreon: feldroy
-open_collective: cookiecutter-django
+open_collective: # Replace with a single Open Collective username
+ko_fi: # Replace with a single Ko-fi username
+tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
+community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry
+liberapay: # Replace with a single Liberapay username
+issuehunt: # Replace with a single IssueHunt username
+otechie: # Replace with a single Otechie username
+custom: ["https://www.patreon.com/browniebroke"]
.github/ISSUE_TEMPLATE/bug.md (59 changes, vendored)
@@ -12,46 +12,41 @@ labels: bug
 
 <!-- To assist you best, please include commands that you've run, options you've selected and any relevant logs -->
 
-- Host system configuration:
-- Version of cookiecutter CLI (get it with `cookiecutter --version`):
-- OS name and version:
+* Host system configuration:
+* Version of cookiecutter CLI (get it with `cookiecutter --version`):
+* OS name and version:
 
 On Linux, run
 
 ```bash
 lsb_release -a 2> /dev/null || cat /etc/redhat-release 2> /dev/null || cat /etc/*-release 2> /dev/null || cat /etc/issue 2> /dev/null
 ```
 
 On MacOs, run
 
 ```bash
 sw_vers
 ```
 
 On Windows, via CMD, run
 
 ```
 systeminfo | findstr /B /C:"OS Name" /C:"OS Version"
 ```
 
 ```bash
 # Insert here the OS name and version
 
 ```
 
-- Python version, run `python3 -V`:
-- Docker version (if using Docker), run `docker --version`:
-- docker compose version (if using Docker), run `docker compose --version`:
-- ...
-
-- Options selected and/or [replay file](https://cookiecutter.readthedocs.io/en/latest/advanced/replay.html):
-
-On Linux and macOS: `cat ${HOME}/.cookiecutter_replay/cookiecutter-django.json`
+* Python version, run `python3 -V`:
+* Docker version (if using Docker), run `docker --version`:
+* docker-compose version (if using Docker), run `docker-compose --version`:
+* ...
+* Options selected and/or [replay file](https://cookiecutter.readthedocs.io/en/latest/advanced/replay.html):
+On Linux and MacOS: `cat ${HOME}/.cookiecutter_replay/cookiecutter-django.json`
 
 (Please, take care to remove sensitive information)
 
 ```json
 # Insert here the replay file content
 ```
 
 <summary>
 Logs:
 <details>
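
Taken together, the details the template asks for can be gathered in one pass — a minimal sketch using only commands already named in the template above (Linux/macOS; the replay path is cookiecutter's default location):

```bash
# Collect the environment details requested by the bug template
cookiecutter --version
python3 -V
docker --version
docker compose --version   # or: docker-compose --version (2022-era syntax)
cat "${HOME}/.cookiecutter_replay/cookiecutter-django.json"
```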
.github/ISSUE_TEMPLATE/paid-support.md (6 changes, vendored)
@@ -5,8 +5,8 @@ about: Ask Core Team members to help you out
 
 Provided your question goes beyond [regular support](https://github.com/cookiecutter/cookiecutter-django/issues/new?template=question.md), and/or the task at hand is of timely/high priority nature use the below information to reach out for contributors directly.
 
-- Bruno Alla, Core Developer ([GitHub](https://github.com/sponsors/browniebroke)).
+* Daniel Roy Greenfeld, Project Lead ([GitHub](https://github.com/pydanny), [Patreon](https://www.patreon.com/danielroygreenfeld)): expertise in Django and AWS ELB.
 
-- Daniel Roy Greenfeld, Project Lead ([GitHub](https://github.com/pydanny), [Patreon](https://www.patreon.com/danielroygreenfeld)): expertise in Django and AWS ELB.
+* Nikita Shupeyko, Core Developer ([GitHub](https://github.com/webyneter)): expertise in Python/Django, hands-on DevOps and frontend experience.
 
-- Nikita Shupeyko, Core Developer ([GitHub](https://github.com/webyneter)): expertise in Python/Django, hands-on DevOps and frontend experience.
+* Bruno Alla, Core Developer ([GitHub](https://github.com/sponsors/browniebroke)).
.github/PULL_REQUEST_TEMPLATE.md (1 change, vendored)
@@ -1,5 +1,6 @@
 <!-- Thank you for helping us out: your efforts mean a great deal to the project and the community as a whole! -->
 
+
 ## Description
 
 <!-- What's it you're proposing? -->
.github/changelog-template.md (9 changes, vendored)
@@ -1,11 +1,8 @@
 {%- for change_type, pulls in grouped_pulls.items() %}
 {%- if pulls %}
-
 ### {{ change_type }}
-
 {%- for pull_request in pulls %}
-
 - {{ pull_request.title }} ([#{{ pull_request.number }}]({{ pull_request.html_url }}))
 {%- endfor -%}
 {% endif -%}
 {% endfor -%}
.github/contributors.json (488 changes, vendored)
@@ -53,12 +53,6 @@
     "twitter_username": "sfdye",
     "is_core": true
   },
-  {
-    "name": "Jelmer Draaijer",
-    "github_login": "foarsitter",
-    "twitter_username": "",
-    "is_core": true
-  },
   {
     "name": "18",
     "github_login": "dezoito",
@@ -559,6 +553,11 @@
     "github_login": "jvanbrug",
     "twitter_username": ""
   },
+  {
+    "name": "Jelmer Draaijer",
+    "github_login": "foarsitter",
+    "twitter_username": ""
+  },
   {
     "name": "Jerome Caisip",
     "github_login": "jeromecaisip",
@@ -1115,7 +1114,7 @@
     "twitter_username": "Qoyyuum"
   },
   {
-    "name": "Matthew Foster Walsh",
+    "name": "mfosterw",
     "github_login": "mfosterw",
     "twitter_username": ""
   },
@@ -1258,480 +1257,5 @@
     "name": "innicoder",
     "github_login": "innicoder",
     "twitter_username": ""
-  },
-  {
-    "name": "Naveen",
-    "github_login": "naveensrinivasan",
-    "twitter_username": "snaveen"
-  },
-  {
-    "name": "Nikita Sobolev",
-    "github_login": "sobolevn",
-    "twitter_username": ""
-  },
-  {
-    "name": "Sebastian Reyes Espinosa",
-    "github_login": "sebastian-code",
-    "twitter_username": "sebastianreyese"
-  },
-  {
-    "name": "jugglinmike",
-    "github_login": "jugglinmike",
-    "twitter_username": ""
-  },
-  {
-    "name": "monosans",
-    "github_login": "monosans",
-    "twitter_username": ""
-  },
-  {
-    "name": "Marcio Mazza",
-    "github_login": "marciomazza",
-    "twitter_username": "marciomazza"
-  },
-  {
-    "name": "Brandon Rumiser",
-    "github_login": "brumiser1550",
-    "twitter_username": ""
-  },
-  {
-    "name": "krati yadav",
-    "github_login": "krati5",
-    "twitter_username": ""
-  },
-  {
-    "name": "Abe Hanoka",
-    "github_login": "abe-101",
-    "twitter_username": "abe__101"
-  },
-  {
-    "name": "Adin Hodovic",
-    "github_login": "adinhodovic",
-    "twitter_username": ""
-  },
-  {
-    "name": "Leifur Halldor Asgeirsson",
-    "github_login": "leifurhauks",
-    "twitter_username": ""
-  },
-  {
-    "name": "David",
-    "github_login": "buckldav",
-    "twitter_username": ""
-  },
-  {
-    "name": "rguptar",
-    "github_login": "rguptar",
-    "twitter_username": ""
-  },
-  {
-    "name": "Omer-5",
-    "github_login": "Omer-5",
-    "twitter_username": ""
-  },
-  {
-    "name": "TAKAHASHI Shuuji",
-    "github_login": "shuuji3",
-    "twitter_username": ""
-  },
-  {
-    "name": "Thomas Booij",
-    "github_login": "ThomasBooij95",
-    "twitter_username": ""
-  },
-  {
-    "name": "Pamela Fox",
-    "github_login": "pamelafox",
-    "twitter_username": "pamelafox"
-  },
-  {
-    "name": "Robin",
-    "github_login": "Kaffeetasse",
-    "twitter_username": ""
-  },
-  {
-    "name": "Patrick Tran",
-    "github_login": "theptrk",
-    "twitter_username": ""
-  },
-  {
-    "name": "tildebox",
-    "github_login": "tildebox",
-    "twitter_username": ""
-  },
-  {
-    "name": "duffn",
-    "github_login": "duffn",
-    "twitter_username": ""
-  },
-  {
-    "name": "Delphine LEMIRE",
-    "github_login": "DelphineLemire",
-    "twitter_username": ""
-  },
-  {
-    "name": "Hoai-Thu Vuong",
-    "github_login": "thuvh",
-    "twitter_username": ""
-  },
-  {
-    "name": "Arkadiusz Michał Ryś",
-    "github_login": "arrys",
-    "twitter_username": ""
-  },
-  {
-    "name": "mpsantos",
-    "github_login": "mpsantos",
-    "twitter_username": ""
-  },
-  {
-    "name": "Morten Kaae",
-    "github_login": "MortenKaae",
-    "twitter_username": ""
-  },
-  {
-    "name": "Birtibu",
-    "github_login": "Birtibu",
-    "twitter_username": ""
-  },
-  {
-    "name": "Matheus Jardim Bernardes",
-    "github_login": "matheusjardimb",
-    "twitter_username": ""
-  },
-  {
-    "name": "masavini",
-    "github_login": "masavini",
-    "twitter_username": ""
-  },
-  {
-    "name": "Joseph Hanna",
-    "github_login": "sanchimenea",
-    "twitter_username": ""
-  },
-  {
-    "name": "tmajerech",
-    "github_login": "tmajerech",
-    "twitter_username": ""
-  },
-  {
-    "name": "villancikos",
-    "github_login": "villancikos",
-    "twitter_username": ""
-  },
-  {
-    "name": "Imran Rahman",
-    "github_login": "infraredCoding",
-    "twitter_username": ""
-  },
-  {
-    "name": "hleroy",
-    "github_login": "hleroy",
-    "twitter_username": ""
-  },
-  {
-    "name": "Shayan Karimi",
-    "github_login": "shywn-mrk",
-    "twitter_username": "shywn_mrk"
-  },
-  {
-    "name": "Sadra Yahyapour",
-    "github_login": "lnxpy",
-    "twitter_username": "lnxpylnxpy"
-  },
-  {
-    "name": "Tharushan",
-    "github_login": "Tharushan",
-    "twitter_username": ""
-  },
-  {
-    "name": "Fateme Fouladkar",
-    "github_login": "FatemeFouladkar",
-    "twitter_username": ""
-  },
-  {
-    "name": "zhaoruibing",
-    "github_login": "zhaoruibing",
-    "twitter_username": ""
-  },
-  {
-    "name": "MinWoo Sung",
-    "github_login": "SungMinWoo",
-    "twitter_username": ""
-  },
-  {
-    "name": "itisnotyourenv",
-    "github_login": "itisnotyourenv",
-    "twitter_username": ""
-  },
-  {
-    "name": "Vageeshan Mankala",
-    "github_login": "vagi8",
-    "twitter_username": ""
-  },
-  {
-    "name": "Jakub Boukal",
-    "github_login": "SukiCZ",
-    "twitter_username": ""
-  },
-  {
-    "name": "Christian Jauvin",
-    "github_login": "cjauvin",
-    "twitter_username": ""
-  },
-  {
-    "name": "Plurific",
-    "github_login": "paulschwenn",
-    "twitter_username": ""
-  },
-  {
-    "name": "GitBib",
-    "github_login": "GitBib",
-    "twitter_username": ""
-  },
-  {
-    "name": "Freddy",
-    "github_login": "Hraesvelg",
-    "twitter_username": ""
-  },
-  {
-    "name": "aiden",
-    "github_login": "anyidea",
-    "twitter_username": ""
-  },
-  {
-    "name": "Michael V. Battista",
-    "github_login": "mvbattista",
-    "twitter_username": "mvbattista"
-  },
-  {
-    "name": "Nix Siow",
-    "github_login": "nixsiow",
-    "twitter_username": "nixsiow"
-  },
-  {
-    "name": "Jens Kaeske",
-    "github_login": "jkaeske",
-    "twitter_username": ""
-  },
-  {
-    "name": "henningbra",
-    "github_login": "henningbra",
-    "twitter_username": ""
-  },
-  {
-    "name": "Paul Wulff",
-    "github_login": "mtmpaulwulff",
-    "twitter_username": ""
-  },
-  {
-    "name": "Mounir",
-    "github_login": "mounirmesselmeni",
-    "twitter_username": ""
-  },
-  {
-    "name": "JAEGYUN JUNG",
-    "github_login": "TGoddessana",
-    "twitter_username": ""
-  },
-  {
-    "name": "Simeon Emanuilov",
-    "github_login": "s-emanuilov",
-    "twitter_username": "s_emanuilov"
-  },
-  {
-    "name": "Patrick Zhang",
-    "github_login": "PatDuJour",
-    "twitter_username": ""
-  },
-  {
-    "name": "GvS",
-    "github_login": "GvS666",
-    "twitter_username": ""
-  },
-  {
-    "name": "David Păcioianu",
-    "github_login": "DavidPacioianu",
-    "twitter_username": ""
-  },
-  {
-    "name": "farwill",
-    "github_login": "farwill",
-    "twitter_username": ""
-  },
-  {
-    "name": "quroom",
-    "github_login": "quroom",
-    "twitter_username": ""
-  },
-  {
-    "name": "Marios Frixou",
-    "github_login": "frixou89",
-    "twitter_username": ""
-  },
-  {
-    "name": "Geo Maciolek",
-    "github_login": "GeoMaciolek",
-    "twitter_username": ""
-  },
-  {
-    "name": "Nadav Peretz",
-    "github_login": "nadavperetz",
-    "twitter_username": ""
-  },
-  {
-    "name": "Param Kapur",
-    "github_login": "paramkpr",
-    "twitter_username": "ParamKapur"
-  },
-  {
-    "name": "Jason Mok",
-    "github_login": "jasonmokk",
-    "twitter_username": ""
-  },
-  {
-    "name": "Manas Mallick",
-    "github_login": "ManDun",
-    "twitter_username": ""
-  },
-  {
-    "name": "Alexandr Artemyev",
-    "github_login": "Mogost",
-    "twitter_username": "MOGOST"
-  },
-  {
-    "name": "Ali Shamakhi",
-    "github_login": "ali-shamakhi",
-    "twitter_username": ""
-  },
-  {
-    "name": "Filipe Nascimento",
-    "github_login": "FilipeNas",
-    "twitter_username": ""
-  },
-  {
-    "name": "Kevin Mills",
-    "github_login": "millsks",
-    "twitter_username": ""
-  },
-  {
-    "name": "milvagox",
-    "github_login": "milvagox",
-    "twitter_username": "milvagox"
-  },
-  {
-    "name": "Johnny Metz",
-    "github_login": "johnnymetz",
-    "twitter_username": ""
-  },
-  {
-    "name": "Will",
-    "github_login": "novucs",
-    "twitter_username": ""
-  },
-  {
-    "name": "rxm7706",
-    "github_login": "rxm7706",
-    "twitter_username": ""
-  },
-  {
-    "name": "Marlon Castillo",
-    "github_login": "mcastle",
-    "twitter_username": ""
-  },
-  {
-    "name": "Alex Kanavos",
-    "github_login": "alexkanavos",
-    "twitter_username": ""
-  },
-  {
-    "name": "LJFP",
-    "github_login": "ljfp",
-    "twitter_username": ""
-  },
-  {
-    "name": "Francisco Navarro Morales ",
-    "github_login": "spothound",
-    "twitter_username": ""
-  },
-  {
-    "name": "Mariot Tsitoara",
-    "github_login": "mariot",
-    "twitter_username": ""
-  },
-  {
-    "name": "Christian Jensen",
-    "github_login": "jensenbox",
-    "twitter_username": "cjensen"
-  },
-  {
-    "name": "Denis Darii",
-    "github_login": "DNX",
-    "twitter_username": ""
-  },
-  {
-    "name": "qwerrrqw",
-    "github_login": "qwerrrqw",
-    "twitter_username": ""
-  },
-  {
-    "name": "Pulse-Mind",
-    "github_login": "pulse-mind",
-    "twitter_username": ""
-  },
-  {
-    "name": "Hana Belay",
-    "github_login": "earthcomfy",
-    "twitter_username": ""
-  },
-  {
-    "name": "Ed Morley",
-    "github_login": "edmorley",
-    "twitter_username": ""
-  },
-  {
-    "name": "Alan Cyment",
-    "github_login": "acyment",
-    "twitter_username": ""
-  },
-  {
-    "name": "Kawsar Alam Foysal",
-    "github_login": "iamfoysal",
-    "twitter_username": ""
-  },
-  {
-    "name": "Igor Jerosimić",
-    "github_login": "igor-wl",
-    "twitter_username": ""
-  },
-  {
-    "name": "Pepa",
-    "github_login": "07pepa",
-    "twitter_username": ""
-  },
-  {
-    "name": "Aidos Kanapyanov",
-    "github_login": "aidoskanapyanov",
-    "twitter_username": ""
-  },
-  {
-    "name": "Jeongseok Kang",
-    "github_login": "rapsealk",
-    "twitter_username": ""
-  },
-  {
-    "name": "Jeff Foster",
-    "github_login": "jeffpfoster",
-    "twitter_username": ""
-  },
-  {
-    "name": "Dominique Plante",
-    "github_login": "dominiqueplante",
-    "twitter_username": ""
-  },
-  {
-    "name": "Lucas Klasa",
-    "github_login": "lucaskbr",
-    "twitter_username": ""
   }
 ]
.github/dependabot.yml (45 changes, vendored)
@@ -1,24 +1,15 @@
 # Config for Dependabot updates. See Documentation here:
-# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
+# https://docs.github.com/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates
 
 version: 2
-enable-beta-ecosystems: true
 updates:
-  # Update Python deps for the template (not the generated project)
-  - package-ecosystem: "uv"
-    directory: "/"
-    schedule:
-      interval: "daily"
-    labels:
-      - "project infrastructure"
-
   # Update GitHub actions in workflows
   - package-ecosystem: "github-actions"
     directory: "/"
     schedule:
       interval: "daily"
     labels:
-      - "project infrastructure"
+      - "update"
 
   # Update npm packages
   - package-ecosystem: "npm"
@@ -27,35 +18,3 @@ updates:
       interval: "daily"
     labels:
       - "update"
-
-  # Enable version updates for Docker
-  - package-ecosystem: "docker"
-    directories:
-      - "{{cookiecutter.project_slug}}/compose/local/django/"
-      - "{{cookiecutter.project_slug}}/compose/local/docs/"
-      - "{{cookiecutter.project_slug}}/compose/production/django/"
-    schedule:
-      interval: "daily"
-    ignore:
-      - dependency-name: "*"
-        update-types:
-          - "version-update:semver-major"
-          - "version-update:semver-minor"
-    labels:
-      - "update"
-    groups:
-      docker-python:
-        patterns:
-          - "*"
-
-  - package-ecosystem: "docker"
-    directories:
-      - "{{cookiecutter.project_slug}}/compose/local/node/"
-      - "{{cookiecutter.project_slug}}/compose/production/aws/"
-      - "{{cookiecutter.project_slug}}/compose/production/postgres/"
-      - "{{cookiecutter.project_slug}}/compose/production/nginx/"
-      - "{{cookiecutter.project_slug}}/compose/production/traefik/"
-    schedule:
-      interval: "daily"
-    labels:
-      - "update"
.github/workflows/align-versions.yml (52 changes, vendored)
@@ -1,52 +0,0 @@
-name: align versions
-
-on:
-  pull_request:
-    paths:
-      - "{{cookiecutter.project_slug}}/requirements/local.txt"
-      - "{{cookiecutter.project_slug}}/compose/local/node/Dockerfile"
-  # Manual trigger
-  workflow_dispatch:
-
-permissions:
-  contents: write
-  pull-requests: write
-
-jobs:
-  run:
-    if: ${{ github.actor == 'pyup-bot' }}
-    runs-on: ubuntu-latest
-    env:
-      GH_PAT: ${{ secrets.GH_PAT }}
-
-    strategy:
-      fail-fast: false
-      matrix:
-        job:
-          - script: scripts/ruff_version.py
-            name: Ruff
-          - script: scripts/node_version.py
-            name: Node
-
-    name: "${{ matrix.job.name }} versions"
-    steps:
-      - name: Checkout with token
-        uses: actions/checkout@v4
-        if: ${{ env.GH_PAT != '' }}
-        with:
-          token: ${{ env.GH_PAT }}
-          ref: ${{ github.head_ref }}
-
-      - name: Checkout without token
-        uses: actions/checkout@v4
-        if: ${{ env.GH_PAT == '' }}
-        with:
-          ref: ${{ github.head_ref }}
-
-      - uses: astral-sh/setup-uv@v6
-
-      - run: uv run ${{ matrix.job.script }}
-
-      - uses: stefanzweifel/git-auto-commit-action@v6
-        with:
-          commit_message: Align versions
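
The alignment step this deleted workflow automates can be reproduced by hand — a minimal sketch, assuming a checkout of master with uv installed (script paths are the ones the workflow names):

```bash
# Run the same alignment scripts the workflow's matrix runs
uv run scripts/ruff_version.py
uv run scripts/node_version.py
```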
.github/workflows/ci.yml (83 changes, vendored)
@@ -2,7 +2,6 @@ name: CI
 
 on:
   push:
-    branches: ["master", "main"]
   pull_request:
 
 concurrency:
@@ -10,6 +9,17 @@ concurrency:
   cancel-in-progress: true
 
 jobs:
+  lint:
+    runs-on: ubuntu-latest
+    steps:
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v3
+        with:
+          python-version: "3.9"
+          cache: pip
+      - name: Run pre-commit
+        uses: pre-commit/action@v2.0.3
+
   tests:
     strategy:
       fail-fast: false
@@ -19,16 +29,18 @@ jobs:
           - windows-latest
           - macOS-latest
 
-    name: "pytest ${{ matrix.os }}"
+    name: "Run tests"
     runs-on: ${{ matrix.os }}
    steps:
-      - uses: actions/checkout@v4
-      - name: Install uv
-        uses: astral-sh/setup-uv@v6
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v3
+        with:
+          python-version: "3.9"
+          cache: pip
       - name: Install dependencies
-        run: uv sync
+        run: pip install -r requirements.txt
       - name: Run tests
-        run: uv run pytest -n auto tests
+        run: pytest tests
 
   docker:
     strategy:
@@ -36,26 +48,24 @@ jobs:
       matrix:
        script:
           - name: Basic
-            args: "ci_tool=Gitlab"
-          - name: Celery & DRF
-            args: "use_celery=y use_drf=y"
-          - name: Gulp
-            args: "frontend_pipeline=Gulp"
-          - name: Webpack
-            args: "frontend_pipeline=Webpack"
+            args: ""
+          - name: Extended
+            args: "use_celery=y use_drf=y frontend_pipeline=Gulp"
 
-    name: "Docker ${{ matrix.script.name }}"
+    name: "${{ matrix.script.name }} Docker"
     runs-on: ubuntu-latest
     env:
       DOCKER_BUILDKIT: 1
       COMPOSE_DOCKER_CLI_BUILD: 1
 
     steps:
-      - uses: actions/checkout@v4
-      - name: Install uv
-        uses: astral-sh/setup-uv@v6
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v3
+        with:
+          python-version: "3.9"
+          cache: pip
       - name: Install dependencies
-        run: uv sync
+        run: pip install -r requirements.txt
       - name: Docker ${{ matrix.script.name }}
         run: sh tests/test_docker.sh ${{ matrix.script.args }}
 
@@ -64,16 +74,12 @@ jobs:
       fail-fast: false
       matrix:
         script:
-          - name: Celery
+          - name: With Celery
             args: "use_celery=y frontend_pipeline='Django Compressor'"
-          - name: Gulp
-            args: "frontend_pipeline=Gulp"
-          - name: Webpack
-            args: "frontend_pipeline=Webpack use_heroku=y"
-          - name: Email Username
-            args: "username_type=email ci_tool=Github project_name='Something superduper long - the great amazing project' project_slug=my_awesome_project"
+          - name: With Gulp
+            args: "frontend_pipeline='Gulp'"
 
-    name: "Bare metal ${{ matrix.script.name }}"
+    name: "${{ matrix.script.name }} Bare metal"
     runs-on: ubuntu-latest
     services:
       redis:
@@ -81,28 +87,31 @@ jobs:
         ports:
           - 6379:6379
       postgres:
-        image: postgres:13
+        image: postgres:12
         ports:
          - 5432:5432
         env:
           POSTGRES_PASSWORD: postgres
 
     env:
-      REDIS_URL: "redis://localhost:6379/0"
+      CELERY_BROKER_URL: "redis://localhost:6379/0"
       # postgres://user:password@host:port/database
       DATABASE_URL: "postgres://postgres:postgres@localhost:5432/postgres"
 
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v3
         with:
-          python-version: "3.12"
-      - name: Install uv
-        uses: astral-sh/setup-uv@v6
+          python-version: "3.9"
+          cache: pip
+          cache-dependency-path: |
+            requirements.txt
+            {{cookiecutter.project_slug}}/requirements/base.txt
+            {{cookiecutter.project_slug}}/requirements/local.txt
       - name: Install dependencies
-        run: uv sync
-      - uses: actions/setup-node@v4
+        run: pip install -r requirements.txt
+      - uses: actions/setup-node@v3
         with:
-          node-version: "22.14"
+          node-version: "16"
       - name: Bare Metal ${{ matrix.script.name }}
         run: sh tests/test_bare.sh ${{ matrix.script.args }}
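
The master-side test jobs can be reproduced locally with the same commands the workflow runs — a rough sketch, assuming uv is installed and Docker is running:

```bash
uv sync                       # install the template's dependencies
uv run pytest -n auto tests   # the "tests" job
sh tests/test_docker.sh use_celery=y use_drf=y   # one "docker" matrix combination
```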
.github/workflows/dependabot-uv-lock.yml (33 changes, vendored)
@@ -1,33 +0,0 @@
-name: uv
-
-on:
-  pull_request:
-    paths:
-      - "pyproject.toml"
-
-permissions:
-  contents: write
-  pull-requests: write
-
-jobs:
-  lock:
-    if: ${{ github.actor == 'dependabot[bot]' }}
-    runs-on: ubuntu-latest
-    env:
-      GH_PAT: ${{ secrets.GH_PAT }}
-    steps:
-      - name: Checkout with token
-        uses: actions/checkout@v4
-        if: ${{ env.GH_PAT != '' }}
-        with:
-          token: ${{ env.GH_PAT }}
-
-      - name: Checkout without token
-        uses: actions/checkout@v4
-        if: ${{ env.GH_PAT == '' }}
-
-      - uses: astral-sh/setup-uv@v6
-      - run: uv lock
-      - uses: stefanzweifel/git-auto-commit-action@v6
-        with:
-          commit_message: Regenerate uv.lock
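
What this deleted workflow automates, done by hand on a Dependabot branch — a minimal sketch, assuming uv is installed:

```bash
uv lock                                     # regenerate the lockfile after a pyproject.toml change
git commit -m "Regenerate uv.lock" uv.lock  # the commit the workflow would push
```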
.github/workflows/django-issue-checker.yml (13 changes, vendored)
@@ -16,10 +16,15 @@ jobs:
 
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - name: Install uv
-        uses: astral-sh/setup-uv@v6
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v3
+        with:
+          python-version: "3.9"
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
       - name: Create Django Major Issue
-        run: uv run --frozen scripts/create_django_issue.py
+        run: python scripts/create_django_issue.py
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
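
Outside CI, the master-side script can be invoked directly; this sketch assumes a personal access token in GITHUB_TOKEN, mirroring the workflow's env block (the token value is a placeholder you must supply):

```bash
export GITHUB_TOKEN=<personal-access-token>   # placeholder
uv run --frozen scripts/create_django_issue.py
```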
.github/workflows/issue-manager.yml (9 changes, vendored)
@@ -23,25 +23,18 @@ jobs:
 
     runs-on: ubuntu-latest
     steps:
-      - uses: tiangolo/issue-manager@0.5.1
+      - uses: tiangolo/issue-manager@0.4.0
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           config: >
             {
               "answered": {
-                "delay": 864000,
                 "message": "Assuming the question was answered, this will be automatically closed now."
               },
               "solved": {
-                "delay": 864000,
                 "message": "Assuming the original issue was solved, it will be automatically closed now."
               },
               "waiting": {
-                "delay": 864000,
                 "message": "Automatically closing after waiting for additional info. To re-open, please provide the additional information requested."
-              },
-              "wontfix": {
-                "delay": 864000,
-                "message": "As discussed, we won't be implementing this. Automatically closing."
               }
             }
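
For reference, the `delay` the master side sets is expressed in seconds and works out to ten days:

```bash
# 864000 seconds / 86400 seconds-per-day = 10 days
echo $((864000 / 86400))
```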
.github/workflows/pre-commit-autoupdate.yml (14 changes, vendored)
@@ -8,23 +8,17 @@ on:
     - cron: "15 2 * * *"
   workflow_dispatch: # to trigger manually
 
-permissions:
-  contents: read
-
 jobs:
   auto-update:
     # Disables this workflow from running in a repository that is not part of the indicated organization/user
     if: github.repository_owner == 'cookiecutter'
-    permissions:
-      contents: write # for peter-evans/create-pull-request to create branch
-      pull-requests: write # for peter-evans/create-pull-request to create a PR
 
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - uses: actions/setup-python@v5
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v3
         with:
-          python-version: "3.12"
+          python-version: "3.9"
 
       - name: Install pre-commit
         run: pip install pre-commit
@@ -37,7 +31,7 @@ jobs:
         run: pre-commit autoupdate
 
       - name: Create Pull Request
-        uses: peter-evans/create-pull-request@v7
+        uses: peter-evans/create-pull-request@v4
         with:
           token: ${{ secrets.GITHUB_TOKEN }}
           branch: update/pre-commit-autoupdate
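
The manual equivalent of this workflow's core steps, on either side of the diff:

```bash
pip install pre-commit
pre-commit autoupdate   # bump hook revisions in .pre-commit-config.yaml
```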
.github/workflows/update-changelog.yml (19 changes, vendored)
@@ -8,20 +8,27 @@ on:
   workflow_dispatch:
 
 jobs:
-  update:
+  release:
     # Disables this workflow from running in a repository that is not part of the indicated organization/user
     if: github.repository_owner == 'cookiecutter'
 
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - name: Install uv
-        uses: astral-sh/setup-uv@v6
+      - uses: actions/checkout@v3
+
+      - name: Set up Python
+        uses: actions/setup-python@v3
+        with:
+          python-version: "3.9"
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
       - name: Set git details
         run: |
           git config --global user.name "github-actions"
           git config --global user.email "action@github.com"
-      - name: Update changelog
-        run: uv run --frozen scripts/update_changelog.py
+      - name: Update list
+        run: python scripts/update_changelog.py
         env:
           GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
.github/workflows/update-contributors.yml (24 changes, vendored)
@@ -5,28 +5,28 @@ on:
     branches:
       - master
 
-permissions:
-  contents: read
-
 jobs:
   build:
     # Disables this workflow from running in a repository that is not part of the indicated organization/user
     if: github.repository_owner == 'cookiecutter'
-    permissions:
-      contents: write # for stefanzweifel/git-auto-commit-action to push code in repo
 
     runs-on: ubuntu-latest
     steps:
-      - uses: actions/checkout@v4
-      - name: Install uv
-        uses: astral-sh/setup-uv@v6
+      - uses: actions/checkout@v3
+
+      - name: Set up Python
+        uses: actions/setup-python@v3
+        with:
+          python-version: "3.9"
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install -r requirements.txt
       - name: Update list
-        run: uv run --frozen scripts/update_contributors.py
-        env:
-          GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
+        run: python scripts/update_contributors.py
 
       - name: Commit changes
-        uses: stefanzweifel/git-auto-commit-action@v6
+        uses: stefanzweifel/git-auto-commit-action@v4.14.0
         with:
           commit_message: Update Contributors
           file_pattern: CONTRIBUTORS.md .github/contributors.json
.pre-commit-config.yaml
@@ -1,58 +1,28 @@
-exclude: "{{cookiecutter.project_slug}}|.github/contributors.json|CHANGELOG.md|CONTRIBUTORS.md"
-default_stages: [pre-commit]
-minimum_pre_commit_version: "3.2.0"
-
-default_language_version:
-  python: python3.12
+exclude: "{{cookiecutter.project_slug}}"
+default_stages: [commit]
 
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v5.0.0
+    rev: v4.1.0
     hooks:
       - id: trailing-whitespace
-      - id: end-of-file-fixer
-      - id: check-json
-      - id: check-toml
-      - id: check-xml
       - id: check-yaml
-      - id: debug-statements
-      - id: check-builtin-literals
-      - id: check-case-conflict
-      - id: detect-private-key
-
-  - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: "v4.0.0-alpha.8"
-    hooks:
-      - id: prettier
-        args: ["--tab-width", "2"]
-
-  - repo: https://github.com/asottile/pyupgrade
-    rev: v3.20.0
-    hooks:
-      - id: pyupgrade
-        args: [--py312-plus]
-        exclude: hooks/
 
   - repo: https://github.com/psf/black
-    rev: 25.1.0
+    rev: 22.3.0
     hooks:
       - id: black
 
   - repo: https://github.com/PyCQA/isort
-    rev: 6.0.1
+    rev: 5.10.1
     hooks:
      - id: isort
 
   - repo: https://github.com/PyCQA/flake8
-    rev: 7.3.0
+    rev: 4.0.1
     hooks:
       - id: flake8
 
-  - repo: https://github.com/tox-dev/pyproject-fmt
-    rev: "v2.6.0"
-    hooks:
-      - id: pyproject-fmt
-
 ci:
   autoupdate_schedule: weekly
   skip: []
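
Typical local use of either side's pre-commit config:

```bash
pip install pre-commit
pre-commit install          # register the git hook
pre-commit run --all-files  # run every configured hook once
```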
.pyup.yml
@@ -14,6 +14,8 @@ pin: True
 label_prs: update
 
 requirements:
+  - "requirements.txt"
+  - "docs/requirements.txt"
   - "{{cookiecutter.project_slug}}/requirements/base.txt"
   - "{{cookiecutter.project_slug}}/requirements/local.txt"
   - "{{cookiecutter.project_slug}}/requirements/production.txt"
.readthedocs.yml
@@ -4,18 +4,12 @@
 # Required
 version: 2
 
-# Set the version of Python and other tools you might need
-build:
-  os: ubuntu-22.04
-  tools:
-    python: "3.12"
-  commands:
-    - asdf plugin add uv
-    - asdf install uv latest
-    - asdf global uv latest
-    - uv sync --only-group docs --frozen
-    - uv run -m sphinx -T -b html -d docs/_build/doctrees -D language=en docs $READTHEDOCS_OUTPUT/html
-
 # Build documentation in the docs/ directory with Sphinx
 sphinx:
   configuration: docs/conf.py
+
+# Version of Python and requirements required to build the docs
+python:
+  version: "3.8"
+  install:
+    - requirements: docs/requirements.txt
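
The master-side docs build can be reproduced locally with the same two commands; replacing `$READTHEDOCS_OUTPUT/html` with a local output path is an adaptation for running outside Read the Docs:

```bash
uv sync --only-group docs --frozen
uv run -m sphinx -T -b html -d docs/_build/doctrees -D language=en docs docs/_build/html  # local output path substituted
```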
CHANGELOG.md (5344 changes) — file diff suppressed because it is too large.
CODE_OF_CONDUCT.md
@@ -1,3 +1,3 @@
 ## Code of Conduct
 
-Everyone who interacts in the Cookiecutter project's codebase, issue trackers, chat rooms, and mailing lists is expected to follow the [PSF Code of Conduct](https://www.python.org/psf/conduct/)
+Everyone who interacts in the Cookiecutter project's codebase, issue trackers, chat rooms, and mailing lists is expected to follow the [PyPA Code of Conduct](https://www.pypa.io/en/latest/code-of-conduct/).
CONTRIBUTING.md
@@ -2,68 +2,41 @@
 
 Always happy to get issues identified and pull requests!
 
-## General considerations
+## Getting your pull request merged in
 
-1. Keep it small. The smaller the change, the more likely we are to accept.
-2. Changes that fix a current issue get priority for review.
-3. Check out [GitHub guide][submit-a-pr] if you've never created a pull request before.
-
-## Getting started
-
-1. Fork the repo
-2. Clone your fork
-3. Create a branch for your changes
-
-This last step is very important, don't start developing from master, it'll cause pain if you need to send another change later.
+1. Keep it small. The smaller the pull request, the more likely we are to accept.
+2. Pull requests that fix a current issue get priority for review.
 
 ## Testing
 
-You'll need to run the tests using Python 3.12. We recommend using [tox](https://tox.readthedocs.io/en/latest/) to run the tests. It will automatically create a fresh virtual environment and install our test dependencies, such as [pytest-cookies](https://pypi.python.org/pypi/pytest-cookies/) and [flake8](https://pypi.python.org/pypi/flake8/).
-
-We'll also run the tests on GitHub actions when you send your pull request, but it's a good idea to run them locally before you send it.
-
 ### Installation
 
-We use uv to manage our environment and manage our Python installation. You can install it following the instructions at https://docs.astral.sh/uv/getting-started/installation/
+Please install [tox](https://tox.readthedocs.io/en/latest/), which is a generic virtualenv management and test command line tool.
 
-### Run the template's test suite
+[tox](https://tox.readthedocs.io/en/latest/) is available for download from [PyPI](https://pypi.python.org/pypi) via [pip](https://pypi.python.org/pypi/pip/):
 
-To run the tests of the template using the current Python version:
+    $ pip install tox
 
-```bash
-$ uv run tox run -e py
-```
+It will automatically create a fresh virtual environment and install our test dependencies,
+such as [pytest-cookies](https://pypi.python.org/pypi/pytest-cookies/) and [flake8](https://pypi.python.org/pypi/flake8/).
 
-This uses `pytest` under the hood, and you can pass options to it after a `--`. So to run a particular test:
+### Run the Tests
 
-```bash
-$ uv run tox run -e py -- -k test_default_configuration
-```
+Tox uses pytest under the hood, hence it supports the same syntax for selecting tests.
 
-For further information, please consult the [pytest usage docs](https://pytest.org/en/latest/how-to/usage.html#specifying-which-tests-to-run).
+For further information please consult the [pytest usage docs](https://pytest.org/latest/usage.html#specifying-tests-selecting-tests).
 
-### Run the generated project tests
+To run all tests using various versions of python in virtualenvs defined in tox.ini, just run tox.:
 
-The template tests are checking that the generated project is fully rendered and that it passes `flake8`. We also have some test scripts which generate a specific project combination, install the dependencies, run the tests of the generated project, install FE dependencies and generate the docs. They will install the template dependencies, so make sure you create and activate a virtual environment first.
+    $ tox
 
-```bash
-$ python -m venv venv
-$ source venv/bin/activate
-```
+It is possible to test with a specific version of python. To do this, the command
+is:
 
-These tests are slower and can be run with or without Docker:
+    $ tox -e py39
 
-- Without Docker: `tests/test_bare.sh` (for bare metal)
-- With Docker: `tests/test_docker.sh`
+This will run pytest with the python3.9 interpreter, for example.
 
-All arguments to these scripts will be passed to the `cookiecutter` CLI, letting you set options, for example:
+To run a particular test with tox for against your current Python version:
 
-```bash
-$ tests/test_bare.sh use_celery=y
-```
-
-## Submitting a pull request
-
-Once you're happy with your changes and they look ok locally, push and send [a pull request][submit-a-pr] to the main repo, which will trigger the tests on GitHub actions. If they fail, try to fix them. A maintainer should take a look at your change and give you feedback or merge it.
-
-[submit-a-pr]: https://docs.github.com/en/pull-requests/collaborating-with-pull-requests/proposing-changes-to-your-work-with-pull-requests/creating-a-pull-request
+    $ tox -e py -- -k test_default_configuration
CONTRIBUTORS.md (693 changes) — file diff suppressed because it is too large.
162
README.md
162
README.md
|
@ -1,79 +1,79 @@
|
||||||
# Cookiecutter Django
|
# Cookiecutter Django
|
||||||
|
|
||||||
[](https://github.com/cookiecutter/cookiecutter-django/actions/workflows/ci.yml?query=branch%3Amaster)
|
[](https://github.com/cookiecutter/cookiecutter-django/actions?query=workflow%3ACI)
|
||||||
[](https://cookiecutter-django.readthedocs.io/en/latest/?badge=latest)
|
[](https://cookiecutter-django.readthedocs.io/en/latest/?badge=latest)
|
||||||
[](https://results.pre-commit.ci/latest/github/cookiecutter/cookiecutter-django/master)
|
|
||||||
[](https://github.com/ambv/black)
|
|
||||||
|
|
||||||
[](https://pyup.io/repos/github/cookiecutter/cookiecutter-django/)
|
[](https://pyup.io/repos/github/cookiecutter/cookiecutter-django/)
|
||||||
[](https://discord.gg/rAWFUP47d2)
|
[](https://discord.gg/uFXweDQc5a)
|
||||||
[](https://www.codetriage.com/cookiecutter/cookiecutter-django)
|
[](https://www.codetriage.com/cookiecutter/cookiecutter-django)
|
||||||
|
[](https://github.com/ambv/black)
|
||||||
|
|
||||||
Powered by [Cookiecutter](https://github.com/cookiecutter/cookiecutter), Cookiecutter Django is a framework for jumpstarting
|
Powered by [Cookiecutter](https://github.com/cookiecutter/cookiecutter), Cookiecutter Django is a framework for jumpstarting
|
||||||
production-ready Django projects quickly.
|
production-ready Django projects quickly.
|
||||||
|
|
||||||
- Documentation: <https://cookiecutter-django.readthedocs.io/en/latest/>
|
- Documentation: <https://cookiecutter-django.readthedocs.io/en/latest/>
|
||||||
- See [Troubleshooting](https://cookiecutter-django.readthedocs.io/en/latest/5-help/troubleshooting.html) for common errors and obstacles
|
- See [Troubleshooting](https://cookiecutter-django.readthedocs.io/en/latest/troubleshooting.html) for common errors and obstacles
|
||||||
- If you have problems with Cookiecutter Django, please open [issues](https://github.com/cookiecutter/cookiecutter-django/issues/new) don't send
|
- If you have problems with Cookiecutter Django, please open [issues](https://github.com/cookiecutter/cookiecutter-django/issues/new) don't send
|
||||||
emails to the maintainers.
|
emails to the maintainers.
|
||||||
|
|
||||||
## Features
|
## Features
|
||||||
|
|
||||||
- For Django 5.1
|
- For Django 3.2
|
||||||
- Works with Python 3.12
|
- Works with Python 3.9
|
||||||
- Renders Django projects with 100% starting test coverage
|
- Renders Django projects with 100% starting test coverage
|
||||||
- Twitter [Bootstrap](https://github.com/twbs/bootstrap) v5
|
- Twitter [Bootstrap](https://github.com/twbs/bootstrap) v5
|
||||||
- [12-Factor](https://12factor.net) based settings via [django-environ](https://github.com/joke2k/django-environ)
|
- [12-Factor](http://12factor.net/) based settings via [django-environ](https://github.com/joke2k/django-environ)
|
||||||
- Secure by default. We believe in SSL.
|
- Secure by default. We believe in SSL.
|
||||||
- Optimized development and production settings
|
- Optimized development and production settings
|
||||||
- Registration via [django-allauth](https://github.com/pennersr/django-allauth)
|
- Registration via [django-allauth](https://github.com/pennersr/django-allauth)
|
||||||
- Comes with custom user model ready to go
|
- Comes with custom user model ready to go
|
||||||
- Optional basic ASGI setup for Websockets
|
- Optional basic ASGI setup for Websockets
|
||||||
- Optional custom static build using Gulp or Webpack
|
- Optional custom static build using Gulp and livereload
|
||||||
- Send emails via [Anymail](https://github.com/anymail/django-anymail) (using [Mailgun](http://www.mailgun.com/) by default or Amazon SES if AWS is selected cloud provider, but switchable)
|
- Send emails via [Anymail](https://github.com/anymail/django-anymail) (using [Mailgun](http://www.mailgun.com/) by default or Amazon SES if AWS is selected cloud provider, but switchable)
|
||||||
- Media storage using Amazon S3, Google Cloud Storage, Azure Storage or nginx
|
- Media storage using Amazon S3 or Google Cloud Storage
|
||||||
- Docker support using [docker-compose](https://github.com/docker/compose) for development and production (using [Traefik](https://traefik.io/) with [LetsEncrypt](https://letsencrypt.org/) support)
|
- Docker support using [docker-compose](https://github.com/docker/compose) for development and production (using [Traefik](https://traefik.io/) with [LetsEncrypt](https://letsencrypt.org/) support)
|
||||||
- [Procfile](https://devcenter.heroku.com/articles/procfile) for deploying to Heroku
|
- [Procfile](https://devcenter.heroku.com/articles/procfile) for deploying to Heroku
|
||||||
- Instructions for deploying to [PythonAnywhere](https://www.pythonanywhere.com/)
|
- Instructions for deploying to [PythonAnywhere](https://www.pythonanywhere.com/)
|
||||||
- Run tests with unittest or pytest
|
- Run tests with unittest or pytest
|
||||||
- Customizable PostgreSQL version
|
- Customizable PostgreSQL version
|
||||||
- Default integration with [pre-commit](https://github.com/pre-commit/pre-commit) for identifying simple issues before submission to code review
|
- Default integration with [pre-commit](https://github.com/pre-commit/pre-commit) for identifying simple issues before submission to code review
|
||||||
|
|
||||||
## Optional Integrations
|
## Optional Integrations
|
||||||
|
|
||||||
_These features can be enabled during initial project setup._
|
*These features can be enabled during initial project setup.*
|
||||||
|
|
||||||
- Serve static files from Amazon S3, Google Cloud Storage, Azure Storage or [Whitenoise](https://whitenoise.readthedocs.io/)
|
- Serve static files from Amazon S3, Google Cloud Storage or [Whitenoise](https://whitenoise.readthedocs.io/)
|
||||||
- Configuration for [Celery](https://docs.celeryq.dev) and [Flower](https://github.com/mher/flower) (the latter in Docker setup only)
|
- Configuration for [Celery](https://docs.celeryq.dev) and [Flower](https://github.com/mher/flower) (the latter in Docker setup only)
|
||||||
- Integration with [Mailpit](https://github.com/axllent/mailpit/) for local email testing
|
- Integration with [MailHog](https://github.com/mailhog/MailHog) for local email testing
|
||||||
- Integration with [Sentry](https://sentry.io/welcome/) for error logging
|
- Integration with [Sentry](https://sentry.io/welcome/) for error logging
|
||||||
|
|
||||||
## Constraints
|
## Constraints
|
||||||
|
|
||||||
- Only maintained 3rd party libraries are used.
|
- Only maintained 3rd party libraries are used.
|
||||||
- Uses PostgreSQL everywhere: 13 - 17 ([MySQL fork](https://github.com/mabdullahadeel/cookiecutter-django-mysql) also available).
|
- Uses PostgreSQL everywhere: 10.19 - 14.1 ([MySQL fork](https://github.com/mabdullahadeel/cookiecutter-django-mysql) also available).
|
||||||
- Environment variables for configuration (this won't work with Apache/mod_wsgi).
|
- Environment variables for configuration (This won't work with Apache/mod_wsgi).
|
||||||
|
|
||||||
## Support this Project!
|
## Support this Project!
|
||||||
|
|
||||||
This project is an open source project run by volunteers. You can sponsor us via [OpenCollective](https://opencollective.com/cookiecutter-django) or individually via GitHub Sponsors:
|
This project is run by volunteers. Please support them in their efforts to maintain and improve Cookiecutter Django:
|
||||||
|
|
||||||
- Daniel Roy Greenfeld, Project Lead ([GitHub](https://github.com/pydanny), [Patreon](https://www.patreon.com/danielroygreenfeld)): expertise in Django and AWS ELB.
|
- Daniel Roy Greenfeld, Project Lead ([GitHub](https://github.com/pydanny), [Patreon](https://www.patreon.com/danielroygreenfeld)): expertise in Django and AWS ELB.
|
||||||
- Fabio C. Barrionuevo, Core Developer ([GitHub](https://github.com/luzfcb)): expertise in Python/Django, hands-on DevOps and frontend experience.
|
- Nikita Shupeyko, Core Developer ([GitHub](https://github.com/webyneter)): expertise in Python/Django, hands-on DevOps and frontend experience.
|
||||||
- Bruno Alla, Core Developer ([GitHub](https://github.com/browniebroke)): expertise in Python/Django and DevOps.
|
|
||||||
- Nikita Shupeyko, Core Developer ([GitHub](https://github.com/webyneter)): expertise in Python/Django, hands-on DevOps and frontend experience.
|
|
||||||
|
|
||||||
Projects that provide financial support to the maintainers:
|
Projects that provide financial support to the maintainers:
|
||||||
|
|
||||||
### Two Scoops of Django
|
------------------------------------------------------------------------
|
||||||
|
|
||||||
[](https://www.feldroy.com/two-scoops-press#two-scoops-of-django)
|
<p align="center">
|
||||||
|
<a href="https://www.feldroy.com/products//two-scoops-of-django-3-x"><img src="https://cdn.shopify.com/s/files/1/0304/6901/products/Two-Scoops-of-Django-3-Alpha-Cover_540x_26507b15-e489-470b-8a97-02773dd498d1_1080x.jpg"></a>
|
||||||
|
</p>
|
||||||
|
|
||||||
Two Scoops of Django 3.x is the best ice cream-themed Django reference in the universe!
|
Two Scoops of Django 3.x is the best ice cream-themed Django reference in the universe!
|
||||||
|
|
||||||
### PyUp
|
### PyUp
|
||||||
|
|
||||||
[](https://pyup.io)
|
<p align="center">
|
||||||
|
<a href="https://pyup.io/"><img src="https://pyup.io/static/images/logo.png"></a>
|
||||||
|
</p>
|
||||||
|
|
||||||
PyUp brings you automated security and dependency updates used by Google and other organizations. Free for open source projects!
|
PyUp brings you automated security and dependency updates used by Google and other organizations. Free for open source projects!
|
||||||
|
|
||||||
|
@ -94,7 +94,7 @@ You'll be prompted for some values. Provide them, then a Django project will be
|
||||||
|
|
||||||
**Warning**: After this point, change 'Daniel Greenfeld', 'pydanny', etc. to your own information.
|
**Warning**: After this point, change 'Daniel Greenfeld', 'pydanny', etc to your own information.
|
||||||
|
|
||||||
Answer the prompts with your own desired [options](http://cookiecutter-django.readthedocs.io/en/latest/1-getting-started/project-generation-options.html). For example:
|
Answer the prompts with your own desired [options](http://cookiecutter-django.readthedocs.io/en/latest/project-generation-options.html). For example:
|
||||||
|
|
||||||
Cloning into 'cookiecutter-django'...
|
Cloning into 'cookiecutter-django'...
|
||||||
remote: Counting objects: 550, done.
|
remote: Counting objects: 550, done.
|
||||||
|
@ -116,24 +116,16 @@ Answer the prompts with your own desired [options](http://cookiecutter-django.re
|
||||||
4 - Apache Software License 2.0
|
4 - Apache Software License 2.0
|
||||||
5 - Not open source
|
5 - Not open source
|
||||||
Choose from 1, 2, 3, 4, 5 [1]: 1
|
Choose from 1, 2, 3, 4, 5 [1]: 1
|
||||||
Select username_type:
|
|
||||||
1 - username
|
|
||||||
2 - email
|
|
||||||
Choose from 1, 2 [1]: 1
|
|
||||||
timezone [UTC]: America/Los_Angeles
|
timezone [UTC]: America/Los_Angeles
|
||||||
windows [n]: n
|
windows [n]: n
|
||||||
Select an editor to use. The choices are:
|
use_pycharm [n]: y
|
||||||
1 - None
|
|
||||||
2 - PyCharm
|
|
||||||
3 - VS Code
|
|
||||||
Choose from 1, 2, 3 [1]: 1
|
|
||||||
use_docker [n]: n
|
use_docker [n]: n
|
||||||
Select postgresql_version:
|
Select postgresql_version:
|
||||||
1 - 17
|
1 - 14.1
|
||||||
2 - 16
|
2 - 13.5
|
||||||
3 - 15
|
3 - 12.9
|
||||||
4 - 14
|
4 - 11.14
|
||||||
5 - 13
|
5 - 10.19
|
||||||
Choose from 1, 2, 3, 4, 5 [1]: 1
|
Choose from 1, 2, 3, 4, 5 [1]: 1
|
||||||
Select cloud_provider:
|
Select cloud_provider:
|
||||||
1 - AWS
|
1 - AWS
|
||||||
|
@ -147,7 +139,7 @@ Answer the prompts with your own desired [options](http://cookiecutter-django.re
|
||||||
4 - Mandrill
|
4 - Mandrill
|
||||||
5 - Postmark
|
5 - Postmark
|
||||||
6 - Sendgrid
|
6 - Sendgrid
|
||||||
7 - Brevo (formerly SendinBlue)
|
7 - SendinBlue
|
||||||
8 - SparkPost
|
8 - SparkPost
|
||||||
9 - Other SMTP
|
9 - Other SMTP
|
||||||
Choose from 1, 2, 3, 4, 5, 6, 7, 8, 9 [1]: 1
|
Choose from 1, 2, 3, 4, 5, 6, 7, 8, 9 [1]: 1
|
||||||
|
@ -157,10 +149,9 @@ Answer the prompts with your own desired [options](http://cookiecutter-django.re
|
||||||
1 - None
|
1 - None
|
||||||
2 - Django Compressor
|
2 - Django Compressor
|
||||||
3 - Gulp
|
3 - Gulp
|
||||||
4 - Webpack
|
|
||||||
Choose from 1, 2, 3, 4 [1]: 1
|
Choose from 1, 2, 3, 4 [1]: 1
|
||||||
use_celery [n]: y
|
use_celery [n]: y
|
||||||
use_mailpit [n]: n
|
use_mailhog [n]: n
|
||||||
use_sentry [n]: y
|
use_sentry [n]: y
|
||||||
use_whitenoise [n]: n
|
use_whitenoise [n]: n
|
||||||
use_heroku [n]: y
|
use_heroku [n]: y
|
||||||
|
@ -190,16 +181,14 @@ Now take a look at your repo. Don't forget to carefully look at the generated RE
|
||||||
|
|
||||||
For local development, see the following:
|
For local development, see the following:
|
||||||
|
|
||||||
- [Developing locally](https://cookiecutter-django.readthedocs.io/en/latest/2-local-development/developing-locally.html)
|
- [Developing locally](http://cookiecutter-django.readthedocs.io/en/latest/developing-locally.html)
|
||||||
- [Developing locally using docker](https://cookiecutter-django.readthedocs.io/en/latest/2-local-development/developing-locally-docker.html)
|
- [Developing locally using docker](http://cookiecutter-django.readthedocs.io/en/latest/developing-locally-docker.html)
|
||||||
|
|
||||||
## Community
|
## Community
|
||||||
|
|
||||||
- Have questions? **Before you ask questions anywhere else**, please post your question on [Stack Overflow](http://stackoverflow.com/questions/tagged/cookiecutter-django) under the _cookiecutter-django_ tag. We check there periodically for questions.
|
- Have questions? **Before you ask questions anywhere else**, please post your question on [Stack Overflow](http://stackoverflow.com/questions/tagged/cookiecutter-django) under the *cookiecutter-django* tag. We check there periodically for questions.
|
||||||
- If you think you found a bug or want to request a feature, please open an [issue](https://github.com/cookiecutter/cookiecutter-django/issues).
|
- If you think you found a bug or want to request a feature, please open an [issue](https://github.com/cookiecutter/cookiecutter-django/issues).
|
||||||
- For anything else, you can chat with us on [Discord](https://discord.gg/uFXweDQc5a).
|
- For anything else, you can chat with us on [Discord](https://discord.gg/uFXweDQc5a).
|
||||||
|
|
||||||
<img src="https://opencollective.com/cookiecutter-django/contributors.svg?width=890&button=false" alt="Contributors">
|
|
||||||
|
|
||||||
## For Readers of Two Scoops of Django
|
## For Readers of Two Scoops of Django
|
||||||
|
|
||||||
|
@ -207,14 +196,13 @@ You may notice that some elements of this project do not exactly match what we d
|
||||||
|
|
||||||
## For PyUp Users
|
## For PyUp Users
|
||||||
|
|
||||||
If you are using [PyUp](https://pyup.io) to keep your dependencies updated and secure, use the code _cookiecutter_ during checkout to get 15% off every month.
|
If you are using [PyUp](https://pyup.io) to keep your dependencies updated and secure, use the code *cookiecutter* during checkout to get 15% off every month.
|
||||||
|
|
||||||
## "Your Stuff"
|
## "Your Stuff"
|
||||||
|
|
||||||
Scattered throughout the Python and HTML of this project are places marked with "your stuff". This is where third-party libraries are to be integrated with your project.
|
Scattered throughout the Python and HTML of this project are places marked with "your stuff". This is where third-party libraries are to be integrated with your project.
|
||||||
|
|
||||||
## For MySQL users
|
## For MySQL users
|
||||||
|
|
||||||
To get full MySQL support in addition to the default Postgresql, you can use this fork of the cookiecutter-django:
|
To get full MySQL support in addition to the default Postgresql, you can use this fork of the cookiecutter-django:
|
||||||
https://github.com/mabdullahadeel/cookiecutter-django-mysql
|
https://github.com/mabdullahadeel/cookiecutter-django-mysql
|
||||||
|
|
||||||
|
@ -224,18 +212,18 @@ Need a stable release? You can find them at <https://github.com/cookiecutter/coo
|
||||||
|
|
||||||
## Not Exactly What You Want?
|
## Not Exactly What You Want?
|
||||||
|
|
||||||
This is what I want. _It might not be what you want._ Don't worry, you have options:
|
This is what I want. *It might not be what you want.* Don't worry, you have options:
|
||||||
|
|
||||||
### Fork This
|
### Fork This
|
||||||
|
|
||||||
If you have differences in your preferred setup, I encourage you to fork this to create your own version.
|
If you have differences in your preferred setup, I encourage you to fork this to create your own version.
|
||||||
Once you have your fork working, let me know and I'll add it to a '_Similar Cookiecutter Templates_' list here.
|
Once you have your fork working, let me know and I'll add it to a '*Similar Cookiecutter Templates*' list here.
|
||||||
It's up to you whether to rename your fork.
|
It's up to you whether to rename your fork.
|
||||||
|
|
||||||
If you do rename your fork, I encourage you to submit it to the following places:
|
If you do rename your fork, I encourage you to submit it to the following places:
|
||||||
|
|
||||||
- [cookiecutter](https://github.com/cookiecutter/cookiecutter) so it gets listed in the README as a template.
|
- [cookiecutter](https://github.com/cookiecutter/cookiecutter) so it gets listed in the README as a template.
|
||||||
- The cookiecutter [grid](https://www.djangopackages.com/grids/g/cookiecutters/) on Django Packages.
|
- The cookiecutter [grid](https://www.djangopackages.com/grids/g/cookiecutters/) on Django Packages.
|
||||||
|
|
||||||
### Submit a Pull Request
|
### Submit a Pull Request
|
||||||
|
|
||||||
|
@ -244,19 +232,15 @@ experience better.
|
||||||
|
|
||||||
## Articles
|
## Articles
|
||||||
|
|
||||||
- [Why cookiecutter-django is Essential for Your Next Django Project](https://medium.com/@millsks/why-cookiecutter-django-is-essential-for-your-next-django-project-7d3c00cdce51) - Aug. 4, 2024
|
- [Using cookiecutter-django with Google Cloud Storage](https://ahhda.github.io/cloud/gce/django/2019/03/12/using-django-cookiecutter-cloud-storage.html) - Mar. 12, 2019
|
||||||
- [How to Make Your Own Django Cookiecutter Template!](https://medium.com/@FatemeFouladkar/how-to-make-your-own-django-cookiecutter-template-a753d4cbb8c2) - Aug. 10, 2023
|
- [cookiecutter-django with Nginx, Route 53 and ELB](https://msaizar.com/blog/cookiecutter-django-nginx-route-53-and-elb/) - Feb. 12, 2018
|
||||||
- [Cookiecutter Django With Amazon RDS](https://haseeburrehman.com/posts/cookiecutter-django-with-amazon-rds/) - Apr, 2, 2021
|
- [cookiecutter-django and Amazon RDS](https://msaizar.com/blog/cookiecutter-django-and-amazon-rds/) - Feb. 7, 2018
|
||||||
- [Complete Walkthrough: Blue/Green Deployment to AWS ECS using GitHub actions](https://github.com/Andrew-Chen-Wang/cookiecutter-django-ecs-github) - June 10, 2020
|
- [Using Cookiecutter to Jumpstart a Django Project on Windows with PyCharm](https://joshuahunter.com/posts/using-cookiecutter-to-jumpstart-a-django-project-on-windows-with-pycharm/) - May 19, 2017
|
||||||
- [Using cookiecutter-django with Google Cloud Storage](https://ahhda.github.io/cloud/gce/django/2019/03/12/using-django-cookiecutter-cloud-storage.html) - Mar. 12, 2019
|
- [Exploring with Cookiecutter](http://www.snowboardingcoder.com/django/2016/12/03/exploring-with-cookiecutter/) - Dec. 3, 2016
|
||||||
- [cookiecutter-django with Nginx, Route 53 and ELB](https://msaizar.com/blog/cookiecutter-django-nginx-route-53-and-elb/) - Feb. 12, 2018
|
- [Introduction to Cookiecutter-Django](http://krzysztofzuraw.com/blog/2016/django-cookiecutter.html) - Feb. 19, 2016
|
||||||
- [cookiecutter-django and Amazon RDS](https://msaizar.com/blog/cookiecutter-django-and-amazon-rds/) - Feb. 7, 2018
|
- [Django and GitLab - Running Continuous Integration and tests with your FREE account](http://dezoito.github.io/2016/05/11/django-gitlab-continuous-integration-phantomjs.html) - May. 11, 2016
|
||||||
- [Using Cookiecutter to Jumpstart a Django Project on Windows with PyCharm](https://joshuahunter.com/posts/using-cookiecutter-to-jumpstart-a-django-project-on-windows-with-pycharm/) - May 19, 2017
|
- [Development and Deployment of Cookiecutter-Django on Fedora](https://realpython.com/blog/python/development-and-deployment-of-cookiecutter-django-on-fedora/) - Jan. 18, 2016
|
||||||
- [Exploring with Cookiecutter](http://www.snowboardingcoder.com/django/2016/12/03/exploring-with-cookiecutter/) - Dec. 3, 2016
|
- [Development and Deployment of Cookiecutter-Django via Docker](https://realpython.com/blog/python/development-and-deployment-of-cookiecutter-django-via-docker/) - Dec. 29, 2015
|
||||||
- [Introduction to Cookiecutter-Django](http://krzysztofzuraw.com/blog/2016/django-cookiecutter.html) - Feb. 19, 2016
|
- [How to create a Django Application using Cookiecutter and Django 1.8](https://www.swapps.io/blog/how-to-create-a-django-application-using-cookiecutter-and-django-1-8/) - Sept. 12, 2015
|
||||||
- [Django and GitLab - Running Continuous Integration and tests with your FREE account](http://dezoito.github.io/2016/05/11/django-gitlab-continuous-integration-phantomjs.html) - May. 11, 2016
|
|
||||||
- [Development and Deployment of Cookiecutter-Django on Fedora](https://realpython.com/blog/python/development-and-deployment-of-cookiecutter-django-on-fedora/) - Jan. 18, 2016
|
|
||||||
- [Development and Deployment of Cookiecutter-Django via Docker](https://realpython.com/blog/python/development-and-deployment-of-cookiecutter-django-via-docker/) - Dec. 29, 2015
|
|
||||||
- [How to create a Django Application using Cookiecutter and Django 1.8](https://www.swapps.io/blog/how-to-create-a-django-application-using-cookiecutter-and-django-1-8/) - Sept. 12, 2015
|
|
||||||
|
|
||||||
Have a blog or online publication? Write about your cookiecutter-django tips and tricks, then send us a pull request with the link.
|
Have a blog or online publication? Write about your cookiecutter-django tips and tricks, then send us a pull request with the link.
|
||||||
|
|
|
@ -4,7 +4,7 @@
|
||||||
"description": "Behold My Awesome Project!",
|
"description": "Behold My Awesome Project!",
|
||||||
"author_name": "Daniel Roy Greenfeld",
|
"author_name": "Daniel Roy Greenfeld",
|
||||||
"domain_name": "example.com",
|
"domain_name": "example.com",
|
||||||
"email": "{{ cookiecutter.author_name.lower() | trim() |replace(' ', '-') }}@{{ cookiecutter.domain_name.lower() | trim() }}",
|
"email": "{{ cookiecutter.author_name.lower()|replace(' ', '-') }}@example.com",
|
||||||
"version": "0.1.0",
|
"version": "0.1.0",
|
||||||
"open_source_license": [
|
"open_source_license": [
|
||||||
"MIT",
|
"MIT",
|
||||||
|
@ -13,13 +13,22 @@
|
||||||
"Apache Software License 2.0",
|
"Apache Software License 2.0",
|
||||||
"Not open source"
|
"Not open source"
|
||||||
],
|
],
|
||||||
"username_type": ["username", "email"],
|
|
||||||
"timezone": "UTC",
|
"timezone": "UTC",
|
||||||
"windows": "n",
|
"windows": "n",
|
||||||
"editor": ["None", "PyCharm", "VS Code"],
|
"use_pycharm": "n",
|
||||||
"use_docker": "n",
|
"use_docker": "n",
|
||||||
"postgresql_version": ["17", "16", "15", "14", "13"],
|
"postgresql_version": [
|
||||||
"cloud_provider": ["AWS", "GCP", "Azure", "None"],
|
"14.1",
|
||||||
|
"13.5",
|
||||||
|
"12.9",
|
||||||
|
"11.14",
|
||||||
|
"10.19"
|
||||||
|
],
|
||||||
|
"cloud_provider": [
|
||||||
|
"AWS",
|
||||||
|
"GCP",
|
||||||
|
"None"
|
||||||
|
],
|
||||||
"mail_service": [
|
"mail_service": [
|
||||||
"Mailgun",
|
"Mailgun",
|
||||||
"Amazon SES",
|
"Amazon SES",
|
||||||
|
@ -27,19 +36,28 @@
|
||||||
"Mandrill",
|
"Mandrill",
|
||||||
"Postmark",
|
"Postmark",
|
||||||
"Sendgrid",
|
"Sendgrid",
|
||||||
"Brevo",
|
"SendinBlue",
|
||||||
"SparkPost",
|
"SparkPost",
|
||||||
"Other SMTP"
|
"Other SMTP"
|
||||||
],
|
],
|
||||||
"use_async": "n",
|
"use_async": "n",
|
||||||
"use_drf": "n",
|
"use_drf": "n",
|
||||||
"frontend_pipeline": ["None", "Django Compressor", "Gulp", "Webpack"],
|
"frontend_pipeline": [
|
||||||
|
"None",
|
||||||
|
"Django Compressor",
|
||||||
|
"Gulp"
|
||||||
|
],
|
||||||
"use_celery": "n",
|
"use_celery": "n",
|
||||||
"use_mailpit": "n",
|
"use_mailhog": "n",
|
||||||
"use_sentry": "n",
|
"use_sentry": "n",
|
||||||
"use_whitenoise": "n",
|
"use_whitenoise": "n",
|
||||||
"use_heroku": "n",
|
"use_heroku": "n",
|
||||||
"ci_tool": ["None", "Travis", "Gitlab", "Github", "Drone"],
|
"ci_tool": [
|
||||||
|
"None",
|
||||||
|
"Travis",
|
||||||
|
"Gitlab",
|
||||||
|
"Github"
|
||||||
|
],
|
||||||
"keep_local_envs_in_vcs": "y",
|
"keep_local_envs_in_vcs": "y",
|
||||||
"debug": "n"
|
"debug": "n"
|
||||||
}
|
}
|
||||||
|
|
|
@ -1,340 +0,0 @@
|
||||||
Getting Up and Running Locally With Docker
|
|
||||||
==========================================
|
|
||||||
|
|
||||||
.. index:: Docker
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
If you're new to Docker, please be aware that some resources are cached system-wide
|
|
||||||
and might reappear if you generate a project multiple times with the same name (e.g.
|
|
||||||
:ref:`this issue with Postgres <docker-postgres-auth-failed>`).
|
|
||||||
|
|
||||||
|
|
||||||
Prerequisites
|
|
||||||
-------------
|
|
||||||
|
|
||||||
* Docker; if you don't have it yet, follow the `installation instructions`_;
|
|
||||||
* Docker Compose; refer to the official documentation for the `installation guide`_.
|
|
||||||
* Pre-commit; refer to the official `pre-commit`_ documentation.
|
|
||||||
* Cookiecutter; refer to the official GitHub repository of `Cookiecutter`_
|
|
||||||
|
|
||||||
.. _`installation instructions`: https://docs.docker.com/install/#supported-platforms
|
|
||||||
.. _`installation guide`: https://docs.docker.com/compose/install/
|
|
||||||
.. _`pre-commit`: https://pre-commit.com/#install
|
|
||||||
.. _`Cookiecutter`: https://github.com/cookiecutter/cookiecutter
|
|
||||||
|
|
||||||
Before Getting Started
|
|
||||||
----------------------
|
|
||||||
.. include:: generate-project-block.rst
|
|
||||||
|
|
||||||
Build the Stack
|
|
||||||
---------------
|
|
||||||
|
|
||||||
This can take a while, especially the first time you run this particular command on your development system::
|
|
||||||
|
|
||||||
$ docker compose -f docker-compose.local.yml build
|
|
||||||
|
|
||||||
Generally, if you want to emulate production environment use ``docker-compose.production.yml`` instead. And this is true for any other actions you might need to perform: whenever a switch is required, just do it!
|
|
||||||
|
|
||||||
Before doing any git commit, `pre-commit`_ should be installed globally on your local machine, and then::
|
|
||||||
|
|
||||||
$ git init
|
|
||||||
$ pre-commit install
|
|
||||||
|
|
||||||
Failing to do so will result in a bunch of CI and linter errors that pre-commit would have caught.
|
|
||||||
|
|
||||||
Run the Stack
|
|
||||||
-------------
|
|
||||||
|
|
||||||
This brings up both Django and PostgreSQL. The first time it is run it might take a while to get started, but subsequent runs will occur quickly.
|
|
||||||
|
|
||||||
Open a terminal at the project root and run the following for local development::
|
|
||||||
|
|
||||||
$ docker compose -f docker-compose.local.yml up
|
|
||||||
|
|
||||||
You can also set the environment variable ``COMPOSE_FILE`` pointing to ``docker-compose.local.yml`` like this::
|
|
||||||
|
|
||||||
$ export COMPOSE_FILE=docker-compose.local.yml
|
|
||||||
|
|
||||||
And then run::
|
|
||||||
|
|
||||||
$ docker compose up
|
|
||||||
|
|
||||||
To run in a detached (background) mode, just::
|
|
||||||
|
|
||||||
$ docker compose up -d
|
|
||||||
|
|
||||||
These commands don't run the docs service. In order to run the docs service, run::
|
|
||||||
|
|
||||||
$ docker compose -f docker-compose.docs.yml up
|
|
||||||
|
|
||||||
To run the docs with local services just use::
|
|
||||||
|
|
||||||
$ docker compose -f docker-compose.local.yml -f docker-compose.docs.yml up
|
|
||||||
|
|
||||||
The site should start and be accessible at http://localhost:3000 if you selected Webpack or Gulp as frontend pipeline and http://localhost:8000 otherwise.
|
|
||||||
|
|
||||||
Execute Management Commands
|
|
||||||
---------------------------
|
|
||||||
|
|
||||||
As with any shell command that we wish to run in our container, this is done using the ``docker compose -f docker-compose.local.yml run --rm`` command: ::
|
|
||||||
|
|
||||||
$ docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
|
|
||||||
$ docker compose -f docker-compose.local.yml run --rm django python manage.py createsuperuser
|
|
||||||
|
|
||||||
Here, ``django`` is the target service we are executing the commands against.
|
|
||||||
Also, please note that ``docker exec`` does not work for running management commands.
|
|
||||||
|
|
||||||
(Optionally) Designate your Docker Development Server IP
|
|
||||||
--------------------------------------------------------
|
|
||||||
|
|
||||||
When ``DEBUG`` is set to ``True``, the host is validated against ``['localhost', '127.0.0.1', '[::1]']``. This is adequate when running a ``virtualenv``. For Docker, in ``config/settings/local.py``, add your host development server IP to ``INTERNAL_IPS`` or ``ALLOWED_HOSTS`` if the variable exists.
|
|
||||||
|
|
||||||
.. _envs:
|
|
||||||
|
|
||||||
Configuring the Environment
|
|
||||||
---------------------------
|
|
||||||
|
|
||||||
This is the excerpt from your project's ``docker-compose.local.yml``: ::
|
|
||||||
|
|
||||||
# ...
|
|
||||||
|
|
||||||
postgres:
|
|
||||||
build:
|
|
||||||
context: .
|
|
||||||
dockerfile: ./compose/production/postgres/Dockerfile
|
|
||||||
volumes:
|
|
||||||
- local_postgres_data:/var/lib/postgresql/data
|
|
||||||
- local_postgres_data_backups:/backups
|
|
||||||
env_file:
|
|
||||||
- ./.envs/.local/.postgres
|
|
||||||
|
|
||||||
# ...
|
|
||||||
|
|
||||||
The most important part here is the ``env_file`` section listing ``./.envs/.local/.postgres``. Generally, the stack's behavior is governed by a number of environment variables (`env(s)`, for short) residing in ``.envs/``; for instance, this is what we generate for you: ::
|
|
||||||
|
|
||||||
.envs
|
|
||||||
├── .local
|
|
||||||
│ ├── .django
|
|
||||||
│ └── .postgres
|
|
||||||
└── .production
|
|
||||||
├── .django
|
|
||||||
└── .postgres
|
|
||||||
|
|
||||||
By convention, for any service ``sI`` in environment ``e`` (you know ``someenv`` is an environment when there is a ``someenv.yml`` file in the project root) that requires configuration, a ``.envs/.e/.sI`` `service configuration` file exists.
|
|
||||||
|
|
||||||
Consider the aforementioned ``.envs/.local/.postgres``: ::
|
|
||||||
|
|
||||||
# PostgreSQL
|
|
||||||
# ------------------------------------------------------------------------------
|
|
||||||
POSTGRES_HOST=postgres
|
|
||||||
POSTGRES_DB=<your project slug>
|
|
||||||
POSTGRES_USER=XgOWtQtJecsAbaIyslwGvFvPawftNaqO
|
|
||||||
POSTGRES_PASSWORD=jSljDz4whHuwO3aJIgVBrqEml5Ycbghorep4uVJ4xjDYQu0LfuTZdctj7y0YcCLu
|
|
||||||
|
|
||||||
The three envs we are presented with here are ``POSTGRES_DB``, ``POSTGRES_USER``, and ``POSTGRES_PASSWORD`` (by the way, their values have also been generated for you). You might have figured out already where these definitions will end up; it's all the same for the ``django`` service container envs.
|
|
||||||
|
|
||||||
One final touch: should you ever need to merge ``.envs/.production/*`` into a single ``.env``, run ``merge_production_dotenvs_in_dotenv.py``: ::
|
|
||||||
|
|
||||||
$ python merge_production_dotenvs_in_dotenv.py
|
|
||||||
|
|
||||||
The ``.env`` file will then be created, with all your production envs residing beside each other.
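If you are curious how the merge works, here is a minimal sketch of the idea (illustrative only; the script shipped with the generated project may differ in its details): ::

    # Sketch: concatenate all production dotenv files into a single .env
    import os

    PRODUCTION_DOTENVS_DIR = os.path.join(".envs", ".production")

    def merge(output_path: str, paths_to_merge: list[str]) -> None:
        with open(output_path, "w") as output_file:
            for path in paths_to_merge:
                with open(path) as merge_file:
                    output_file.write(merge_file.read().rstrip() + "\n")

    if __name__ == "__main__":
        dotenv_files = sorted(
            os.path.join(PRODUCTION_DOTENVS_DIR, name)
            for name in os.listdir(PRODUCTION_DOTENVS_DIR)
        )
        merge(".env", dotenv_files)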
|
|
||||||
|
|
||||||
|
|
||||||
Tips & Tricks
|
|
||||||
-------------
|
|
||||||
|
|
||||||
Activate a Docker Machine
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
This tells our computer that all future commands are specifically for the ``dev1`` machine. Using the ``eval`` command we can switch machines as needed: ::
|
|
||||||
|
|
||||||
$ eval "$(docker-machine env dev1)"
|
|
||||||
|
|
||||||
Add 3rd party python packages
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
To install a new 3rd party python package, you cannot use ``pip install <package_name>``; that would only add the package to the running container. The container is ephemeral, so the new library won't persist when you run another container. Instead, you should modify the Docker image:
|
|
||||||
Modify the relevant requirements file (``base``, ``local`` or ``production``) by adding: ::
|
|
||||||
|
|
||||||
<package_name>==<package_version>
|
|
||||||
|
|
||||||
To get this change picked up, you'll need to rebuild the image(s) and restart the running container: ::
|
|
||||||
|
|
||||||
docker compose -f docker-compose.local.yml build
|
|
||||||
docker compose -f docker-compose.local.yml up
|
|
||||||
|
|
||||||
Debugging
|
|
||||||
~~~~~~~~~
|
|
||||||
|
|
||||||
ipdb
|
|
||||||
"""""
|
|
||||||
|
|
||||||
If you are using the following within your code to debug: ::
|
|
||||||
|
|
||||||
import ipdb; ipdb.set_trace()
|
|
||||||
|
|
||||||
Then you may need to run the following for it to work as desired: ::
|
|
||||||
|
|
||||||
$ docker compose -f docker-compose.local.yml run --rm --service-ports django
|
|
||||||
|
|
||||||
|
|
||||||
django-debug-toolbar
|
|
||||||
""""""""""""""""""""
|
|
||||||
|
|
||||||
In order for ``django-debug-toolbar`` to work, designate your Docker Machine IP with ``INTERNAL_IPS`` in ``local.py``.
|
|
||||||
|
|
||||||
|
|
||||||
docker
|
|
||||||
""""""
|
|
||||||
|
|
||||||
The ``container_name`` from the yml file can be used to check on containers with docker commands, for example: ::
|
|
||||||
|
|
||||||
$ docker logs <project_slug>_local_celeryworker
|
|
||||||
$ docker top <project_slug>_local_celeryworker
|
|
||||||
|
|
||||||
Notice that the ``container_name`` is generated dynamically, using your project slug as a prefix.
|
|
||||||
|
|
||||||
Mailpit
|
|
||||||
~~~~~~~
|
|
||||||
|
|
||||||
When developing locally you can go with Mailpit_ for email testing, provided ``use_mailpit`` was set to ``y`` on setup. To proceed,
|
|
||||||
|
|
||||||
#. make sure ``<project_slug>_local_mailpit`` container is up and running;
|
|
||||||
|
|
||||||
#. open up ``http://127.0.0.1:8025``.
|
|
||||||
|
|
||||||
.. _Mailpit: https://github.com/axllent/mailpit/
|
|
||||||
|
|
||||||
.. _`CeleryTasks`:
|
|
||||||
|
|
||||||
Celery tasks in local development
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
When not using Docker, Celery tasks are set to run in eager mode, so that a full stack is not needed. When using Docker, the task scheduler will be used by default.
|
|
||||||
|
|
||||||
If you need tasks to be executed on the main thread during development, set ``CELERY_TASK_ALWAYS_EAGER = True`` in ``config/settings/local.py``.
|
|
||||||
|
|
||||||
Possible uses could be for testing, or ease of profiling with DJDT.
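As an illustration, the override in ``config/settings/local.py`` might look like this (both names are standard Celery settings; whether the generated settings file already contains the second one is an assumption): ::

    # Run Celery tasks synchronously on the calling thread, so no broker
    # or worker container is required while debugging or profiling.
    CELERY_TASK_ALWAYS_EAGER = True
    # Re-raise exceptions from eagerly executed tasks instead of swallowing them.
    CELERY_TASK_EAGER_PROPAGATES = True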
|
|
||||||
|
|
||||||
.. _`CeleryFlower`:
|
|
||||||
|
|
||||||
Celery Flower
|
|
||||||
~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
`Flower`_ is a "real-time monitor and web admin for Celery distributed task queue".
|
|
||||||
|
|
||||||
Prerequisites:
|
|
||||||
|
|
||||||
* ``use_docker`` was set to ``y`` on project initialization;
|
|
||||||
* ``use_celery`` was set to ``y`` on project initialization.
|
|
||||||
|
|
||||||
By default, it's enabled both in local and production environments (``docker-compose.local.yml`` and ``docker-compose.production.yml`` Docker Compose configs, respectively) through a ``flower`` service. For added security, ``flower`` requires its clients to provide authentication credentials specified as the corresponding environments' ``.envs/.local/.django`` and ``.envs/.production/.django`` ``CELERY_FLOWER_USER`` and ``CELERY_FLOWER_PASSWORD`` environment variables. Check out ``localhost:5555`` and see for yourself.
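For illustration, the two variables might look like this in ``.envs/.local/.django`` (the values below are placeholders; real ones are generated for you): ::

    CELERY_FLOWER_USER=debug
    CELERY_FLOWER_PASSWORD=debug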
|
|
||||||
|
|
||||||
.. _`Flower`: https://github.com/mher/flower
|
|
||||||
|
|
||||||
Using Webpack or Gulp
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
If you've opted for Gulp or Webpack as front-end pipeline, the project comes configured with `Sass`_ compilation and `live reloading`_. As you change your Sass/JS source files, the task runner will automatically rebuild the corresponding CSS and JS assets and reload them in your browser without refreshing the page.
|
|
||||||
|
|
||||||
The stack comes with a dedicated node service to build the static assets, watch for changes and proxy requests to the Django app with live reloading scripts injected in the response. For everything to work smoothly, you need to access the application at the port served by the node service, which is http://localhost:3000 by default.
|
|
||||||
|
|
||||||
.. _Sass: https://sass-lang.com/
|
|
||||||
.. _live reloading: https://browsersync.io
|
|
||||||
|
|
||||||
|
|
||||||
Using Just for Docker Commands
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
We have included a ``justfile`` to simplify the use of frequent Docker commands for local development.
|
|
||||||
|
|
||||||
.. warning::
|
|
||||||
Currently, "Just" does not reliably handle signals or forward them to its subprocesses. As a result,
|
|
||||||
pressing CTRL+C (or sending other signals like SIGTERM, SIGINT, or SIGHUP) may only interrupt
|
|
||||||
"Just" itself rather than its subprocesses.
|
|
||||||
For more information, see `this GitHub issue <https://github.com/casey/just/issues/2473>`_.
|
|
||||||
|
|
||||||
First, install Just using one of the methods described in the `official documentation <https://just.systems/man/en/packages.html>`_.
|
|
||||||
|
|
||||||
Here are the available commands (a sketch of the recipes themselves follows this list):
|
|
||||||
|
|
||||||
- ``just build``
|
|
||||||
Builds the Python image using the local Docker Compose file.
|
|
||||||
|
|
||||||
- ``just up``
|
|
||||||
Starts the containers in detached mode and removes orphaned containers.
|
|
||||||
|
|
||||||
- ``just down``
|
|
||||||
Stops the running containers.
|
|
||||||
|
|
||||||
- ``just prune``
|
|
||||||
Stops and removes containers along with their volumes. You can optionally pass an argument with the service name to prune a single container.
|
|
||||||
|
|
||||||
- ``just logs``
|
|
||||||
Shows container logs. You can optionally pass an argument with the service name to view logs for a specific service.
|
|
||||||
|
|
||||||
- ``just manage <command>``
|
|
||||||
Runs Django management commands within the container. Replace ``<command>`` with any valid Django management command, such as ``migrate``, ``createsuperuser``, or ``shell``.
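The recipes are thin wrappers around the Docker Compose commands used throughout this page. As a rough sketch of the idea (the generated ``justfile`` may differ in its details): ::

    # justfile (sketch, not necessarily the generated file)
    build:
        docker compose -f docker-compose.local.yml build

    up:
        docker compose -f docker-compose.local.yml up -d --remove-orphans

    down:
        docker compose -f docker-compose.local.yml down

    # e.g. `just manage migrate` or `just manage createsuperuser`
    manage +ARGS:
        docker compose -f docker-compose.local.yml run --rm django python manage.py {{ ARGS }}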
|
|
||||||
|
|
||||||
|
|
||||||
(Optionally) Developing locally with HTTPS
|
|
||||||
------------------------------------------
|
|
||||||
|
|
||||||
Nginx
|
|
||||||
~~~~~
|
|
||||||
|
|
||||||
If you want to add some sort of social authentication with an OAuth provider such as Facebook, securing your communication to the local development environment will be necessary. These providers usually require that you use an HTTPS URL for the OAuth redirect URL for the Facebook login to work appropriately.
|
|
||||||
|
|
||||||
Here is a link to an article on `how to add HTTPS using Nginx`_ to your local docker installation. This also includes how to serve files from the ``media`` location, in the event that you want to serve user-uploaded content.
|
|
||||||
|
|
||||||
.. _`how to add HTTPS using Nginx`: https://afroshok.com/cookiecutter-https
|
|
||||||
|
|
||||||
Webpack
|
|
||||||
~~~~~~~
|
|
||||||
|
|
||||||
If you are using Webpack, first install `mkcert`_. It is a simple-by-design tool that hides all the arcane knowledge required to generate valid TLS certificates. It works for any hostname or IP, including localhost. It supports macOS, Linux, and Windows, and Firefox, Chrome and Java. It even works on mobile devices with a couple of manual steps. See https://blog.filippo.io/mkcert-valid-https-certificates-for-localhost/
|
|
||||||
|
|
||||||
.. _`mkcert`: https://github.com/FiloSottile/mkcert/blob/master/README.md#supported-root-stores
|
|
||||||
|
|
||||||
The following is what you should configure to secure your local environment. Take the certificates that you generated and place them in a folder called ``certs`` in the project's root folder. Configure an ``nginx`` reverse-proxy server as a ``service`` in ``docker-compose.local.yml``. This makes sure that it does not interfere with our ``traefik`` configuration, which is reserved for production environments.
|
|
||||||
|
|
||||||
Assuming that you registered your local hostname as ``my-dev-env.local``, the certificates you will put in the folder should have the names ``my-dev-env.local.crt`` and ``my-dev-env.local.key``.
|
|
||||||
|
|
||||||
1. Add the ``nginx-proxy`` service to the ``docker-compose.local.yml``. ::
|
|
||||||
|
|
||||||
nginx-proxy:
|
|
||||||
image: jwilder/nginx-proxy:alpine
|
|
||||||
container_name: nginx-proxy
|
|
||||||
ports:
|
|
||||||
- "80:80"
|
|
||||||
- "443:443"
|
|
||||||
volumes:
|
|
||||||
- /var/run/docker.sock:/tmp/docker.sock:ro
|
|
||||||
- ./certs:/etc/nginx/certs
|
|
||||||
restart: always
|
|
||||||
depends_on:
|
|
||||||
- node
|
|
||||||
environment:
|
|
||||||
- VIRTUAL_HOST=my-dev-env.local
|
|
||||||
- VIRTUAL_PORT=3000
|
|
||||||
|
|
||||||
2. Add the local secure domain to ``config/settings/local.py``. You should allow the new hostname: ::
|
|
||||||
|
|
||||||
ALLOWED_HOSTS = ["localhost", "0.0.0.0", "127.0.0.1", "my-dev-env.local"]
|
|
||||||
|
|
||||||
3. Add the following configuration to the ``devServer`` section of ``webpack/dev.config.js`` ::
|
|
||||||
|
|
||||||
client: {
|
|
||||||
webSocketURL: 'auto://0.0.0.0:0/ws', // note the `:0` after `0.0.0.0`
|
|
||||||
},
|
|
||||||
|
|
||||||
|
|
||||||
Rebuild your ``docker`` application. ::
|
|
||||||
|
|
||||||
$ docker compose -f docker-compose.local.yml up -d --build
|
|
||||||
|
|
||||||
Go to your browser and type in your URL bar ``https://my-dev-env.local``.
|
|
||||||
|
|
||||||
For more on this configuration, see `https with nginx`_.
|
|
||||||
|
|
||||||
.. _`https with nginx`: https://codewithhugo.com/docker-compose-local-https/
|
|
|
@ -1,254 +0,0 @@
|
||||||
Getting Up and Running Locally
|
|
||||||
==============================
|
|
||||||
|
|
||||||
.. index:: pip, virtualenv, PostgreSQL
|
|
||||||
|
|
||||||
|
|
||||||
Setting Up Development Environment
|
|
||||||
----------------------------------
|
|
||||||
|
|
||||||
Make sure to have the following on your host:
|
|
||||||
|
|
||||||
* Python 3.12
|
|
||||||
* PostgreSQL_.
|
|
||||||
* Redis_, if using Celery
|
|
||||||
* Cookiecutter_
|
|
||||||
|
|
||||||
First things first.
|
|
||||||
|
|
||||||
#. Create a virtualenv: ::
|
|
||||||
|
|
||||||
$ python3.12 -m venv <virtual env path>
|
|
||||||
|
|
||||||
#. Activate the virtualenv you have just created: ::
|
|
||||||
|
|
||||||
$ source <virtual env path>/bin/activate
|
|
||||||
|
|
||||||
#. .. include:: generate-project-block.rst
|
|
||||||
|
|
||||||
#. Install development requirements: ::
|
|
||||||
|
|
||||||
$ cd <what you have entered as the project_slug at setup stage>
|
|
||||||
$ pip install -r requirements/local.txt
|
|
||||||
$ git init # A git repo is required for pre-commit to install
|
|
||||||
$ pre-commit install
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
the `pre-commit` hook exists in the generated project by default.
|
|
||||||
For the details of `pre-commit`, see the `pre-commit`_ site.
|
|
||||||
|
|
||||||
#. Create a new PostgreSQL database using createdb_: ::
|
|
||||||
|
|
||||||
$ createdb --username=postgres <project_slug>
|
|
||||||
|
|
||||||
``project_slug`` is what you have entered as the project_slug at the setup stage.
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
if this is the first time a database is created on your machine, you might need an
|
|
||||||
`initial PostgreSQL set up`_ to allow local connections & set a password for
|
|
||||||
the ``postgres`` user. The `postgres documentation`_ explains the syntax of the config file
|
|
||||||
that you need to change.
|
|
||||||
|
|
||||||
|
|
||||||
#. Set the environment variables for your database(s): ::
|
|
||||||
|
|
||||||
$ export DATABASE_URL=postgres://postgres:<password>@127.0.0.1:5432/<DB name given to createdb>
|
|
||||||
|
|
||||||
.. note::
|
|
||||||
|
|
||||||
Check out the :ref:`settings` page for a comprehensive list of the environment variables.
|
|
||||||
|
|
||||||
.. seealso::
|
|
||||||
|
|
||||||
To help set up your environment variables, you have a few options:
|
|
||||||
|
|
||||||
* create a ``.env`` file in the root of your project and define all the variables you need in it (see the example after this list).
|
|
||||||
Then you just need to have ``DJANGO_READ_DOT_ENV_FILE=True`` set on your machine, and all the variables
|
|
||||||
will be read.
|
|
||||||
* Use a local environment manager like `direnv`_
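For example, a minimal ``.env`` for this setup might contain just the database URL (the values below are placeholders): ::

    DATABASE_URL=postgres://postgres:<password>@127.0.0.1:5432/<project_slug>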
|
|
||||||
|
|
||||||
#. Apply migrations: ::
|
|
||||||
|
|
||||||
$ python manage.py migrate
|
|
||||||
|
|
||||||
#. If you're running synchronously, see the application being served through the Django development server: ::
|
|
||||||
|
|
||||||
$ python manage.py runserver 0.0.0.0:8000
|
|
||||||
|
|
||||||
or if you're running asynchronously: ::
|
|
||||||
|
|
||||||
$ uvicorn config.asgi:application --host 0.0.0.0 --reload --reload-include '*.html'
|
|
||||||
|
|
||||||
If you've opted for Webpack or Gulp as frontend pipeline, please see the :ref:`dedicated section <bare-metal-webpack-gulp>` below.
|
|
||||||
|
|
||||||
.. _PostgreSQL: https://www.postgresql.org/download/
|
|
||||||
.. _Redis: https://redis.io/download
|
|
||||||
.. _CookieCutter: https://github.com/cookiecutter/cookiecutter
|
|
||||||
.. _createdb: https://www.postgresql.org/docs/current/static/app-createdb.html
|
|
||||||
.. _initial PostgreSQL set up: https://web.archive.org/web/20190303010033/http://suite.opengeo.org/docs/latest/dataadmin/pgGettingStarted/firstconnect.html
|
|
||||||
.. _postgres documentation: https://www.postgresql.org/docs/current/static/auth-pg-hba-conf.html
|
|
||||||
.. _pre-commit: https://pre-commit.com/
|
|
||||||
.. _direnv: https://direnv.net/
|
|
||||||
|
|
||||||
|
|
||||||
Creating Your First Django App
|
|
||||||
-------------------------------
|
|
||||||
|
|
||||||
After setting up your environment, you're ready to add your first app. This project uses the setup from "Two Scoops of Django" with a two-tier layout:
|
|
||||||
|
|
||||||
- **Top Level Repository Root** has config files, documentation, `manage.py`, and more.
|
|
||||||
- **Second Level Django Project Root** is where your Django apps live.
|
|
||||||
- **Second Level Configuration Root** holds settings and URL configurations.
|
|
||||||
|
|
||||||
The project layout looks something like this: ::
|
|
||||||
|
|
||||||
<repository_root>/
|
|
||||||
├── config/
|
|
||||||
│ ├── settings/
|
|
||||||
│ │ ├── __init__.py
|
|
||||||
│ │ ├── base.py
|
|
||||||
│ │ ├── local.py
|
|
||||||
│ │ └── production.py
|
|
||||||
│ ├── urls.py
|
|
||||||
│ └── wsgi.py
|
|
||||||
├── <django_project_root>/
|
|
||||||
│ ├── <name_of_the_app>/
|
|
||||||
│ │ ├── migrations/
|
|
||||||
│ │ ├── admin.py
|
|
||||||
│ │ ├── apps.py
|
|
||||||
│ │ ├── models.py
|
|
||||||
│ │ ├── tests.py
|
|
||||||
│ │ └── views.py
|
|
||||||
│ ├── __init__.py
|
|
||||||
│ └── ...
|
|
||||||
├── requirements/
|
|
||||||
│ ├── base.txt
|
|
||||||
│ ├── local.txt
|
|
||||||
│ └── production.txt
|
|
||||||
├── manage.py
|
|
||||||
├── README.md
|
|
||||||
└── ...
|
|
||||||
|
|
||||||
|
|
||||||
Following this structured approach, here's how to add a new app:
|
|
||||||
|
|
||||||
#. **Create the app** using Django's ``startapp`` command, replacing ``<name-of-the-app>`` with your desired app name: ::
|
|
||||||
|
|
||||||
$ python manage.py startapp <name-of-the-app>
|
|
||||||
|
|
||||||
#. **Move the app** to the Django Project Root, maintaining the project's two-tier structure: ::
|
|
||||||
|
|
||||||
$ mv <name-of-the-app> <django_project_root>/
|
|
||||||
|
|
||||||
#. **Edit the app's apps.py**, changing ``name = '<name-of-the-app>'`` to ``name = '<django_project_root>.<name-of-the-app>'`` (see the example after this list).
|
|
||||||
|
|
||||||
#. **Register the new app** by adding it to the ``LOCAL_APPS`` list in ``config/settings/base.py``, integrating it as an official component of your project.
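For illustration, assuming a (hypothetical) project slug of ``my_awesome_project`` and an app named ``blog``, the edited ``apps.py`` from step 3 would look something like: ::

    # my_awesome_project/blog/apps.py
    from django.apps import AppConfig


    class BlogConfig(AppConfig):
        # The dotted path must start from the repository root, because the
        # app now lives inside the Django project root rather than top-level.
        name = "my_awesome_project.blog"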
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Setup Email Backend
|
|
||||||
-------------------
|
|
||||||
|
|
||||||
Mailpit
|
|
||||||
~~~~~~~
|
|
||||||
|
|
||||||
.. note:: In order for the project to support Mailpit_ it must have been bootstrapped with ``use_mailpit`` set to ``y``.
|
|
||||||
|
|
||||||
Mailpit is used to receive emails during development. It is written in Go and has no external dependencies.
|
|
||||||
|
|
||||||
For instance, one of the packages we depend upon, ``django-allauth``, sends verification emails to new users signing up as well as to existing users who have not yet verified themselves.
|
|
||||||
|
|
||||||
#. `Download the latest Mailpit release`_ for your OS.
|
|
||||||
|
|
||||||
#. Copy the binary file to the project root.
|
|
||||||
|
|
||||||
#. Make it executable: ::
|
|
||||||
|
|
||||||
$ chmod +x mailpit
|
|
||||||
|
|
||||||
#. Spin up another terminal window and start it there: ::
|
|
||||||
|
|
||||||
$ ./mailpit
|
|
||||||
|
|
||||||
#. Check out `<http://127.0.0.1:8025/>`_ to see how it goes.
|
|
||||||
|
|
||||||
Now you have your own mail server running locally, ready to receive whatever you send it.
|
|
||||||
|
|
||||||
.. _`Download the latest Mailpit release`: https://github.com/axllent/mailpit
|
|
||||||
|
|
||||||
Console
|
|
||||||
~~~~~~~
|
|
||||||
|
|
||||||
.. note:: If you have generated your project with ``use_mailpit`` set to ``n``, this will be the default setup.
|
|
||||||
|
|
||||||
Alternatively, deliver emails over console via ``EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'``.
|
|
||||||
|
|
||||||
In production, we have Mailgun_ configured to have your back!
|
|
||||||
|
|
||||||
.. _Mailgun: https://www.mailgun.com/
|
|
||||||
|
|
||||||
|
|
||||||
Celery
|
|
||||||
------
|
|
||||||
|
|
||||||
If the project is configured to use Celery as a task scheduler then, by default, tasks are set to run on the main thread when developing locally instead of getting sent to a broker. However, if you have Redis set up on your local machine, you can set the following in ``config/settings/local.py``::
|
|
||||||
|
|
||||||
CELERY_TASK_ALWAYS_EAGER = False
|
|
||||||
|
|
||||||
Next, make sure `redis-server` is installed (per the `Getting started with Redis`_ guide) and run the server in one terminal::
|
|
||||||
|
|
||||||
$ redis-server
|
|
||||||
|
|
||||||
Start the Celery worker by running the following command in another terminal::
|
|
||||||
|
|
||||||
$ celery -A config.celery_app worker --loglevel=info
|
|
||||||
|
|
||||||
That Celery worker should be running whenever your app is running, typically as a background process,
|
|
||||||
so that it can pick up any tasks that get queued. Learn more from the `Celery Workers Guide`_.
|
|
||||||
|
|
||||||
The project comes with a simple task for manual testing purposes, inside `<project_slug>/users/tasks.py`. To queue that task locally, start the Django shell, import the task, and call `delay()` on it::
|
|
||||||
|
|
||||||
$ python manage.py shell
|
|
||||||
>>> from <project_slug>.users.tasks import get_users_count
|
|
||||||
>>> get_users_count.delay()
|
|
||||||
|
|
||||||
You can also use Django admin to queue up tasks, thanks to the `django-celerybeat`_ package.
|
|
||||||
|
|
||||||
.. _Getting started with Redis: https://redis.io/docs/getting-started/
|
|
||||||
.. _Celery Workers Guide: https://docs.celeryq.dev/en/stable/userguide/workers.html
|
|
||||||
.. _django-celerybeat: https://django-celery-beat.readthedocs.io/en/latest/
|
|
||||||
|
|
||||||
|
|
||||||
.. _bare-metal-webpack-gulp:
|
|
||||||
|
|
||||||
Using Webpack or Gulp
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
If you've opted for Gulp or Webpack as front-end pipeline, the project comes configured with `Sass`_ compilation and `live reloading`_. As you change your Sass/JS source files, the task runner will automatically rebuild the corresponding CSS and JS assets and reload them in your browser without refreshing the page.
|
|
||||||
|
|
||||||
#. Make sure that `Node.js`_ v18 is installed on your machine.
|
|
||||||
#. In the project root, install the JS dependencies with::
|
|
||||||
|
|
||||||
$ npm install
|
|
||||||
|
|
||||||
#. Now - with your virtualenv activated - start the application by running::
|
|
||||||
|
|
||||||
$ npm run dev
|
|
||||||
|
|
||||||
This will start 2 processes in parallel: the static assets build loop on one side, and the Django server on the other.
|
|
||||||
|
|
||||||
#. Access your application at the address of the ``node`` service in order to see your correct styles. This is http://localhost:3000 by default.
|
|
||||||
|
|
||||||
.. note:: Do NOT access the application using the Django port (8000 by default), as it will result in broken styles and 404s when accessing static assets.
|
|
||||||
|
|
||||||
|
|
||||||
.. _Node.js: http://nodejs.org/download/
|
|
||||||
.. _Sass: https://sass-lang.com/
|
|
||||||
.. _live reloading: https://browsersync.io
|
|
||||||
|
|
||||||
Summary
|
|
||||||
-------
|
|
||||||
|
|
||||||
Congratulations, you have made it! Keep on reading to unleash the full potential of Cookiecutter Django.
|
|
|
@ -1,6 +0,0 @@
|
||||||
Generate a new cookiecutter-django project: ::
|
|
||||||
|
|
||||||
$ cookiecutter gh:cookiecutter/cookiecutter-django
|
|
||||||
|
|
||||||
For more information refer to
|
|
||||||
:ref:`Project Generation Options <template-options>`.
|
|
|
@ -1,33 +0,0 @@
|
||||||
Linters
|
|
||||||
=======
|
|
||||||
|
|
||||||
.. index:: linters
|
|
||||||
|
|
||||||
|
|
||||||
ruff
|
|
||||||
------
|
|
||||||
|
|
||||||
Ruff is a Python linter and code formatter, written in Rust.
|
|
||||||
It is an aggregation of flake8, pylint, pyupgrade and many more tools.
|
|
||||||
|
|
||||||
Ruff comes with a linter (``ruff check``) and a formatter (``ruff format``).
|
|
||||||
The linter re-implements rules from flake8, pylint, and other linters,
|
|
||||||
and the formatter is a drop-in replacement for black (import sorting is covered by the linter's isort-compatible rules).
|
|
||||||
|
|
||||||
To run ruff without modifying your files: ::
|
|
||||||
|
|
||||||
$ ruff format --diff .
|
|
||||||
$ ruff check .
|
|
||||||
|
|
||||||
Ruff is capable of fixing most of the problems it encounters.
|
|
||||||
Be sure to commit before running `ruff` so you can restore to a savepoint (and amend afterwards to prevent a double commit): ::
|
|
||||||
|
|
||||||
$ ruff format .
|
|
||||||
$ ruff check --fix .
|
|
||||||
# be careful with the --unsafe-fixes option, it can break your code
|
|
||||||
$ ruff check --fix --unsafe-fixes .
|
|
||||||
|
|
||||||
The config for ruff is located in ``pyproject.toml``.
|
|
||||||
One of the most important options is `tool.ruff.lint.select`.
|
|
||||||
`select` determines which linters are run. For example, `DJ <https://docs.astral.sh/ruff/rules/#flake8-django-dj>`_ refers to flake8-django.
|
|
||||||
For a full list of available linters, see `https://docs.astral.sh/ruff/rules/ <https://docs.astral.sh/ruff/rules/>`_
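As an illustration, a minimal excerpt from ``pyproject.toml`` enabling a few rule sets might look like this (the codes below are examples, not necessarily the template's exact selection): ::

    [tool.ruff.lint]
    select = [
      "E",   # pycodestyle errors
      "F",   # pyflakes
      "UP",  # pyupgrade
      "DJ",  # flake8-django
    ]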
|
|
|
@ -1,3 +0,0 @@
|
||||||
```{include} ../../CONTRIBUTING.md
|
|
||||||
|
|
||||||
```
|
|
|
@ -1,104 +0,0 @@
|
||||||
# Maintainer guide
|
|
||||||
|
|
||||||
This document is intended for maintainers of the template.
|
|
||||||
|
|
||||||
## Automated updates
|
|
||||||
|
|
||||||
We use 2 separate services to keep our dependencies up-to-date:
|
|
||||||
|
|
||||||
- Dependabot, which manages updates of Python deps of the template, GitHub actions, npm packages and Docker images.
|
|
||||||
- PyUp, which manages the Python deps for the generated project.
|
|
||||||
|
|
||||||
We don't use Dependabot for the generated project deps because our requirements files are templated, and Dependabot fails to parse them. PyUp is, as far as we know, the only service out there that supports having Jinja tags in requirements files.
|
|
||||||
|
|
||||||
Updates for the template should be labelled as `project infrastructure` while the ones about the generated project should be labelled as `update`. This is used in conjunction with our changelog script (see below).
|
|
||||||
|
|
||||||
## Automation scripts
|
|
||||||
|
|
||||||
We have a few workflows which have been automated over time. They usually run using GitHub actions and might need a few small manual actions to work nicely. Some have a few limitations which we should document here.
|
|
||||||
|
|
||||||
### CI
|
|
||||||
|
|
||||||
`ci.yml`
|
|
||||||
|
|
||||||
The CI workflow tries to cover 2 main aspects of the template:
|
|
||||||
|
|
||||||
- Check all combinations to make sure that valid files are generated with no major linting issues. Issues which are fixed by an auto-formatter after generation aren't considered major; we only aim for best effort there. This is under the `test` job.
|
|
||||||
- Run more in-depth tests on a few combinations, by installing dependencies, running the type checker and the test suite of the generated project. We try to cover docker (`docker` job) and non-docker (`bare` job) setups.
|
|
||||||
|
|
||||||
We also run the deployment checks, but we don't do much more beyond that for testing the production setup.
|
|
||||||
|
|
||||||
### Django issue checker
|
|
||||||
|
|
||||||
`django-issue-checker.yml`
|
|
||||||
|
|
||||||
This workflow runs daily, on schedule, and checks whether a new major version of Django (not in the strict SemVer sense) has been released that we are not running, and lists our dependencies' compatibility with it.
|
|
||||||
|
|
||||||
For example, at the time of writing, we use Django 4.2, but the latest version of Django is 5.0, so the workflow created a ["Django 5.0" issue](https://github.com/cookiecutter/cookiecutter-django/issues/4724) in GitHub, with a compatibility table, and keeps it up to date every day.
|
|
||||||
|
|
||||||
#### Limitations
|
|
||||||
|
|
||||||
Here are a few current and past limitations of the script:
|
|
||||||
|
|
||||||
- When a new dependency is added to the template, the script fails to update an existing issue
|
|
||||||
- It is unclear what happens when a dependency is removed
|
|
||||||
- ~~Unable to parse classifiers without minor version~~
|
|
||||||
- ~~Creates an issue even if we are on the latest version~~
|
|
||||||
|
|
||||||
### Issue manager
|
|
||||||
|
|
||||||
`issue-manager.yml`
|
|
||||||
|
|
||||||
A workflow that uses [Sebastian Ramirez' issue-manager](https://github.com/tiangolo/issue-manager) to help us automate issue management. The tag line from the repo explains it well:
|
|
||||||
|
|
||||||
> Automatically close issues or Pull Requests that have a label, after a custom delay, if no one replies back.
|
|
||||||
|
|
||||||
It runs on a schedule as well as when some actions are taken on issues and pull requests.
|
|
||||||
|
|
||||||
We wait 10 days before closing issues, and we have a few customised reasons, which are configured in the workflow itself. The config should be fairly self-explanatory.
|
|
||||||
|
|
||||||
### Pre-commit auto-update
|
|
||||||
|
|
||||||
`pre-commit-autoupdate.yml`
|
|
||||||
|
|
||||||
Runs daily to perform `pre-commit autoupdate` on the template as well as the generated project, and opens a pull request with the changes.
|
|
||||||
|
|
||||||
#### Limitations
|
|
||||||
|
|
||||||
- The PR is opened by a GitHub action, which means that CI does NOT run. The documentation for the create-pull-request action [explains why](https://github.com/peter-evans/create-pull-request/blob/main/docs/concepts-guidelines.md#triggering-further-workflow-runs).
|
|
||||||
- Some hooks are also installed as local dependencies (via `requirements/local.txt`), but these are updated separately via PyUP.
|
|
||||||
|
|
||||||
### Update changelog
|
|
||||||
|
|
||||||
`update-changelog.yml`
|
|
||||||
|
|
||||||
Runs daily at 2 AM to update our changelog and create a GitHub release. This runs a custom script which:
|
|
||||||
|
|
||||||
- Lists all pull requests merged the day before
|
|
||||||
- The release name is calendar based, so `YYYY.MM.DD`
|
|
||||||
- For each PR:
|
|
||||||
- Get the PR title to summarize the change
|
|
||||||
- Look at the PR labels to classify it in a section of the release notes:
|
|
||||||
- anything labelled `project infrastructure` is excluded
|
|
||||||
- label `update` goes in section "Updated"
|
|
||||||
- label `bug` goes in section "Fixed"
|
|
||||||
- label `docs` goes in section "Documentation"
|
|
||||||
- Default to section "Changed"
|
|
||||||
|
|
||||||
With that in mind, when merging changes, it's a good idea to set the labels and rename the PR title to give a good summary of the change, in the context of the changelog.
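To make the classification concrete, here is a sketch of the label-to-section mapping in Python (illustrative only; the actual script may differ):

```python
# Sketch of the changelog section lookup, not the actual script.
SECTION_BY_LABEL = {
    "update": "Updated",
    "bug": "Fixed",
    "docs": "Documentation",
}


def classify(pr_labels: set[str]) -> str | None:
    # PRs about the template's own infrastructure are excluded entirely.
    if "project infrastructure" in pr_labels:
        return None
    for label, section in SECTION_BY_LABEL.items():
        if label in pr_labels:
            return section
    return "Changed"  # default section
```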
|
|
||||||
|
|
||||||
#### Limitations
|
|
||||||
|
|
||||||
- Dependabot updates for npm & Docker have a verbose title; try to rename them to be more readable: `Bump webpack-dev-server from 4.15.1 to 5.0.2 in /{{cookiecutter.project_slug}}` -> `Bump webpack-dev-server to 5.0.2`
|
|
||||||
- ~~Dependencies updates for the template repo (tox, cookiecutter, etc...) don't need to appear in changelog, and need to be labelled as `project infrastructure` manually. By default, they come from PyUp labelled as `update`.~~
|
|
||||||
|
|
||||||
### Update contributors
|
|
||||||
|
|
||||||
`update-contributors.yml`
|
|
||||||
|
|
||||||
Runs on each push to the master branch. Lists the 5 most recently merged pull requests and extracts their authors. If any of the authors is new, updates `.github/contributors.json`, regenerates `CONTRIBUTORS.md` from it, and pushes the changes back to master.

#### Limitations

- If you merge a pull request from a new contributor, and merge another one right after, the push to master will fail as the remote will be out of date.
- If you merge more than 5 pull requests in a row like this, the new contributor might fail to be added.

docs/Makefile (142 lines changed)
@ -4,30 +4,150 @@
 # You can set these variables from the command line.
 SPHINXOPTS    =
 SPHINXBUILD   = sphinx-build
-SOURCEDIR     = .
+PAPER         =
 BUILDDIR      = _build

-.PHONY: help clean html livehtml linkcheck
+# Internal variables.
+PAPEROPT_a4     = -D latex_paper_size=a4
+PAPEROPT_letter = -D latex_paper_size=letter
+ALLSPHINXOPTS   = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+# the i18n builder cannot share the environment and doctrees with the others
+I18NSPHINXOPTS  = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
+
+.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext

 help:
 	@echo "Please use \`make <target>' where <target> is one of"
-	@awk '/^#/{c=substr($$0,3);next}c&&/^[[:alpha:]][[:alnum:]_-]+:/{print substr($$1,1,index($$1,":")),c}1{c=0}' $(MAKEFILE_LIST) | column -s: -t
+	@echo "  html       to make standalone HTML files"
+	@echo "  dirhtml    to make HTML files named index.html in directories"
+	@echo "  singlehtml to make a single large HTML file"
+	@echo "  pickle     to make pickle files"
+	@echo "  json       to make JSON files"
+	@echo "  htmlhelp   to make HTML files and a HTML help project"
+	@echo "  qthelp     to make HTML files and a qthelp project"
+	@echo "  devhelp    to make HTML files and a Devhelp project"
+	@echo "  epub       to make an epub"
+	@echo "  latex      to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
+	@echo "  latexpdf   to make LaTeX files and run them through pdflatex"
+	@echo "  text       to make text files"
+	@echo "  man        to make manual pages"
+	@echo "  texinfo    to make Texinfo files"
+	@echo "  info       to make Texinfo files and run them through makeinfo"
+	@echo "  gettext    to make PO message catalogs"
+	@echo "  changes    to make an overview of all changed/added/deprecated items"
+	@echo "  linkcheck  to check all external links for integrity"
+	@echo "  doctest    to run all doctests embedded in the documentation (if enabled)"

-# Clean the build output
 clean:
 	-rm -rf $(BUILDDIR)/*

-# Build the HTML docs
 html:
-	$(SPHINXBUILD) -b html $(SPHINXOPTS) $(SOURCEDIR) $(BUILDDIR)/html
+	$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."

-# Build and serve docs with live reload
-livehtml:
-	sphinx-autobuild -b html --port 9000 --watch . -c . $(SOURCEDIR) $(BUILDDIR)/html
+dirhtml:
+	$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
+	@echo
+	@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
+
+singlehtml:
+	$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
+	@echo
+	@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
+
+pickle:
+	$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
+	@echo
+	@echo "Build finished; now you can process the pickle files."
+
+json:
+	$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
+	@echo
+	@echo "Build finished; now you can process the JSON files."
+
+htmlhelp:
+	$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
+	@echo
+	@echo "Build finished; now you can run HTML Help Workshop with the" \
+	      ".hhp project file in $(BUILDDIR)/htmlhelp."
+
+qthelp:
+	$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
+	@echo
+	@echo "Build finished; now you can run "qcollectiongenerator" with the" \
+	      ".qhcp project file in $(BUILDDIR)/qthelp, like this:"
+	@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/{{ cookiecutter.project_slug }}.qhcp"
+	@echo "To view the help file:"
+	@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/{{ cookiecutter.project_slug }}.qhc"
+
+devhelp:
+	$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
+	@echo
+	@echo "Build finished."
+	@echo "To view the help file:"
+	@echo "# mkdir -p $$HOME/.local/share/devhelp/{{ cookiecutter.project_slug }}"
+	@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/{{ cookiecutter.project_slug }}"
+	@echo "# devhelp"
+
+epub:
+	$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
+	@echo
+	@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
+
+latex:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo
+	@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
+	@echo "Run \`make' in that directory to run these through (pdf)latex" \
+	      "(use \`make latexpdf' here to do that automatically)."
+
+latexpdf:
+	$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
+	@echo "Running LaTeX files through pdflatex..."
+	$(MAKE) -C $(BUILDDIR)/latex all-pdf
+	@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
+
+text:
+	$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
+	@echo
+	@echo "Build finished. The text files are in $(BUILDDIR)/text."
+
+man:
+	$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
+	@echo
+	@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
+
+texinfo:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo
+	@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
+	@echo "Run \`make' in that directory to run these through makeinfo" \
+	      "(use \`make info' here to do that automatically)."
+
+info:
+	$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
+	@echo "Running Texinfo files through makeinfo..."
+	make -C $(BUILDDIR)/texinfo info
+	@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
+
+gettext:
+	$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
+	@echo
+	@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
+
+changes:
+	$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
+	@echo
+	@echo "The overview file is in $(BUILDDIR)/changes."

-# Check all external links for integrity
 linkcheck:
-	$(SPHINXBUILD) -b linkcheck $(SPHINXOPTS) $(SOURCEDIR) $(BUILDDIR)/linkcheck
+	$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
 	@echo
 	@echo "Link check complete; look for any errors in the above output " \
 	      "or in $(BUILDDIR)/linkcheck/output.txt."
+
+doctest:
+	$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
+	@echo "Testing of doctests in the sources finished, look at the " \
+	      "results in $(BUILDDIR)/doctest/output.txt."

docs/conf.py (16 lines changed)
@ -23,16 +23,13 @@ now = datetime.now()

 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
-extensions = ["myst_parser"]
+extensions = []

 # Add any paths that contain templates here, relative to this directory.
 templates_path = ["_templates"]

 # The suffix of source filenames.
-source_suffix = {
-    ".rst": "restructuredtext",
-    ".md": "markdown",
-}
+source_suffix = ".rst"

 # The encoding of source files.
 # source_encoding = 'utf-8-sig'

@ -42,7 +39,7 @@ master_doc = "index"

 # General information about the project.
 project = "Cookiecutter Django"
-copyright = f"2013-{now.year}, Daniel Roy Greenfeld"
+copyright = "2013-{}, Daniel Roy Greenfeld".format(now.year)

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the

@ -188,7 +185,7 @@ latex_documents = [
         "cookiecutter-django Documentation",
         "cookiecutter-django",
         "manual",
-    ),
+    )
 ]

 # The name of an image file (relative to this directory) to place at the top of

@ -223,7 +220,7 @@ man_pages = [
         "Cookiecutter Django documentation",
         ["Daniel Roy Greenfeld"],
         1,
-    ),
+    )
 ]

 # If true, show URL addresses after external links.

@ -242,7 +239,8 @@ texinfo_documents = [
         "Cookiecutter Django documentation",
         "Daniel Roy Greenfeld",
         "Cookiecutter Django",
-        "A Cookiecutter template for creating production-ready Django projects quickly.",
+        "A Cookiecutter template for creating production-ready "
+        "Django projects quickly.",
         "Miscellaneous",
     )
 ]

@ -12,19 +12,21 @@ Run these commands to deploy the project to Heroku:

     heroku create --buildpack heroku/python

-    # Note: this is not a free plan
-    heroku addons:create heroku-postgresql:essential-0
+    heroku addons:create heroku-postgresql:hobby-dev

     # On Windows use double quotes for the time zone, e.g.
     # heroku pg:backups schedule --at "02:00 America/Los_Angeles" DATABASE_URL
     heroku pg:backups schedule --at '02:00 America/Los_Angeles' DATABASE_URL
     heroku pg:promote DATABASE_URL

-    heroku addons:create heroku-redis:mini
+    heroku addons:create heroku-redis:hobby-dev

     # Assuming you chose Mailgun as mail service (see below for others)
     heroku addons:create mailgun:starter

+    heroku config:set PYTHONHASHSEED=random
+
+    heroku config:set WEB_CONCURRENCY=4

     heroku config:set DJANGO_DEBUG=False
     heroku config:set DJANGO_SETTINGS_MODULE=config.settings.production
     heroku config:set DJANGO_SECRET_KEY="$(openssl rand -base64 64)"

@ -44,7 +46,7 @@ Run these commands to deploy the project to Heroku:

     # Assign with AWS_STORAGE_BUCKET_NAME
     heroku config:set DJANGO_AWS_STORAGE_BUCKET_NAME=

-    git push heroku main
+    git push heroku master

     heroku run python manage.py createsuperuser

@ -62,7 +64,7 @@ The script above assumes that you've chose Mailgun as email service. If you want

 .. warning::

-    .. include:: ../includes/mailgun.rst
+    .. include:: mailgun.rst

 Heroku & Docker
 +++++++++++++++

@ -83,6 +85,8 @@ it's in the ``Procfile``, but is turned off by default:

 .. code-block:: bash

+    # Set the broker URL to Redis
+    heroku config:set CELERY_BROKER_URL=`heroku config:get REDIS_URL`
     # Scale dyno to 1 instance
     heroku ps:scale worker=1

@ -105,10 +109,10 @@ Or add the DSN for your account, if you already have one:

 .. _Sentry add-on: https://elements.heroku.com/addons/sentry


-Gulp or Webpack
-+++++++++++++++
+Gulp & Bootstrap compilation
+++++++++++++++++++++++++++++

-If you've opted for Gulp or Webpack as frontend pipeline, you'll most likely need to setup
+If you've opted for Gulp, you'll most likely need to setup
 your app to use `multiple buildpacks`_: one for Python & one for Node.js:

 .. code-block:: bash

@ -117,8 +121,8 @@ your app to use `multiple buildpacks`_: one for Python & one for Node.js:

 At time of writing, this should do the trick: during deployment,
 the Heroku should run ``npm install`` and then ``npm build``,
-which run the SASS compilation & JS bundling.
+which runs Gulp in cookiecutter-django.

 If things don't work, please refer to the Heroku docs.

 .. _multiple buildpacks: https://devcenter.heroku.com/articles/using-multiple-buildpacks-for-an-app

@ -34,10 +34,9 @@ Make sure your project is fully committed and pushed up to Bitbucket or Github o

     git clone <my-repo-url>    # you can also use hg
     cd my-project-name
-    mkvirtualenv --python=/usr/bin/python3.10 my-project-name
+    mkvirtualenv --python=/usr/bin/python3.9 my-project-name
     pip install -r requirements/production.txt  # may take a few minutes

-.. note:: We're creating the virtualenv using Python 3.10 (``--python=/usr/bin/python3.10``), although Cookiecutter Django generates a project for Python 3.12. This is because, at time of writing, PythonAnywhere only supports Python 3.10. It shouldn't be a problem, but if it is, you may try changing the Python version to 3.12 and see if it works. If it does, please let us know, or even better, submit a pull request to update this section.
-
 Setting environment variables in the console
 --------------------------------------------

@ -1,7 +1,7 @@
 Deployment with Docker
 ======================

-.. index:: deployment, docker, docker compose, compose
+.. index:: deployment, docker, docker-compose, compose


 Prerequisites

@ -14,7 +14,7 @@ Prerequisites
 Understanding the Docker Compose Setup
 --------------------------------------

-Before you begin, check out the ``docker-compose.production.yml`` file in the root of this project. Keep note of how it provides configuration for the following services:
+Before you begin, check out the ``production.yml`` file in the root of this project. Keep note of how it provides configuration for the following services:

 * ``django``: your application running behind ``Gunicorn``;
 * ``postgres``: PostgreSQL database with the application's relational data;

@ -55,7 +55,7 @@ You will probably also need to setup the Mail backend, for example by adding a `

 .. warning::

-    .. include:: ../includes/mailgun.rst
+    .. include:: mailgun.rst


 Optional: Use AWS IAM Role for EC2 instance

@ -84,32 +84,6 @@ You can read more about this feature and how to configure it, at `Automatic HTTP

 .. _Automatic HTTPS: https://docs.traefik.io/https/acme/

-.. _webpack-whitenoise-limitation:
-
-Webpack without Whitenoise limitation
--------------------------------------
-
-If you opt for Webpack without Whitenoise, Webpack needs to know the static URL at build time, when running ``docker compose build`` (see ``webpack/prod.config.js``). Depending on your setup, this URL may come from the following environment variables:
-
-- ``AWS_STORAGE_BUCKET_NAME``
-- ``DJANGO_AWS_S3_CUSTOM_DOMAIN``
-- ``DJANGO_GCP_STORAGE_BUCKET_NAME``
-- ``DJANGO_AZURE_CONTAINER_NAME``
-
-The Django settings get these values at runtime via the ``.envs/.production/.django`` file, but Docker does not read this file at build time; it only looks for a ``.env`` file in the root of the project. Failing to pass the values correctly will result in a page without CSS styles or JavaScript.
-
-To solve this, you can either:
-
-1. merge all the env files into ``.env`` by running::
-
-       merge_production_dotenvs_in_dotenv.py
-
-2. create a ``.env`` file in the root of the project with just the variables you need. You'll need to also define them in ``.envs/.production/.django`` (hence duplicating them).
-3. set these variables when running the build command::
-
-       DJANGO_AWS_S3_CUSTOM_DOMAIN=example.com docker compose -f docker-compose.production.yml build
-
-None of these options is ideal; we're open to suggestions on how to improve this. If you think you have one, please open an issue or a pull request.
-
 (Optional) Postgres Data Volume Modifications
 ---------------------------------------------

@ -122,42 +96,42 @@ Building & Running Production Stack

 You will need to build the stack first. To do that, run::

-    docker compose -f docker-compose.production.yml build
+    docker-compose -f production.yml build

 Once this is ready, you can run it with::

-    docker compose -f docker-compose.production.yml up
+    docker-compose -f production.yml up

 To run the stack and detach the containers, run::

-    docker compose -f docker-compose.production.yml up -d
+    docker-compose -f production.yml up -d

 To run a migration, open up a second terminal and run::

-    docker compose -f docker-compose.production.yml run --rm django python manage.py migrate
+    docker-compose -f production.yml run --rm django python manage.py migrate

 To create a superuser, run::

-    docker compose -f docker-compose.production.yml run --rm django python manage.py createsuperuser
+    docker-compose -f production.yml run --rm django python manage.py createsuperuser

 If you need a shell, run::

-    docker compose -f docker-compose.production.yml run --rm django python manage.py shell
+    docker-compose -f production.yml run --rm django python manage.py shell

 To check the logs out, run::

-    docker compose -f docker-compose.production.yml logs
+    docker-compose -f production.yml logs

 If you want to scale your application, run::

-    docker compose -f docker-compose.production.yml up --scale django=4
-    docker compose -f docker-compose.production.yml up --scale celeryworker=2
+    docker-compose -f production.yml up --scale django=4
+    docker-compose -f production.yml up --scale celeryworker=2

 .. warning:: don't try to scale ``postgres``, ``celerybeat``, or ``traefik``.

 To see how your containers are doing run::

-    docker compose -f docker-compose.production.yml ps
+    docker-compose -f production.yml ps


 Example: Supervisor

@ -165,12 +139,12 @@ Example: Supervisor

 Once you are ready with your initial setup, you want to make sure that your application is run by a process manager to
 survive reboots and auto restarts in case of an error. You can use the process manager you are most familiar with. All
-it needs to do is to run ``docker compose -f docker-compose.production.yml up`` in your project's root directory.
+it needs to do is to run ``docker-compose -f production.yml up`` in your project's root directory.

 If you are using ``supervisor``, you can use this file as a starting point::

     [program:{{cookiecutter.project_slug}}]
-    command=docker compose -f docker-compose.production.yml up
+    command=docker-compose -f production.yml up
     directory=/path/to/{{cookiecutter.project_slug}}
     redirect_stderr=true
     autostart=true

@ -187,7 +161,3 @@ For status check, run::

     supervisorctl status

-Media files without cloud provider
-----------------------------------
-
-If you chose no cloud provider and Docker, the media files will be served by an nginx service, from a ``production_django_media`` volume. Make sure to keep this around to avoid losing any media files.

docs/developing-locally-docker.rst (new file, 304 lines)
@ -0,0 +1,304 @@

Getting Up and Running Locally With Docker
==========================================

.. index:: Docker

The steps below will get you up and running with a local development environment.
All of these commands assume you are in the root of your generated project.

.. note::

    If you're new to Docker, please be aware that some resources are cached system-wide
    and might reappear if you generate a project multiple times with the same name (e.g.
    :ref:`this issue with Postgres <docker-postgres-auth-failed>`).


Prerequisites
-------------

* Docker; if you don't have it yet, follow the `installation instructions`_;
* Docker Compose; refer to the official documentation for the `installation guide`_;
* Pre-commit; refer to the official documentation for `pre-commit`_.

.. _`installation instructions`: https://docs.docker.com/install/#supported-platforms
.. _`installation guide`: https://docs.docker.com/compose/install/
.. _`pre-commit`: https://pre-commit.com/#install

Build the Stack
---------------

This can take a while, especially the first time you run this particular command on your development system::

    $ docker-compose -f local.yml build

Generally, if you want to emulate the production environment, use ``production.yml`` instead. And this is true for any other actions you might need to perform: whenever a switch is required, just do it!

Before doing any git commit, `pre-commit`_ should be installed globally on your local machine, and then::

    $ git init
    $ pre-commit install

Failing to do so will result in a bunch of CI and linter errors that can be avoided with pre-commit.


Run the Stack
-------------

This brings up both Django and PostgreSQL. The first time it is run it might take a while to get started, but subsequent runs will occur quickly.

Open a terminal at the project root and run the following for local development::

    $ docker-compose -f local.yml up

You can also set the environment variable ``COMPOSE_FILE`` pointing to ``local.yml`` like this::

    $ export COMPOSE_FILE=local.yml

And then run::

    $ docker-compose up

To run in a detached (background) mode, just::

    $ docker-compose up -d


Execute Management Commands
---------------------------

As with any shell command that we wish to run in our container, this is done using the ``docker-compose -f local.yml run --rm`` command: ::

    $ docker-compose -f local.yml run --rm django python manage.py migrate
    $ docker-compose -f local.yml run --rm django python manage.py createsuperuser

Here, ``django`` is the target service we are executing the commands against.


(Optionally) Designate your Docker Development Server IP
--------------------------------------------------------

When ``DEBUG`` is set to ``True``, the host is validated against ``['localhost', '127.0.0.1', '[::1]']``. This is adequate when running a ``virtualenv``. For Docker, in ``config.settings.local``, add your host development server IP to ``INTERNAL_IPS`` or ``ALLOWED_HOSTS`` if the variable exists.

.. _envs:

Configuring the Environment
---------------------------

This is an excerpt from your project's ``local.yml``: ::

    # ...

    postgres:
      build:
        context: .
        dockerfile: ./compose/production/postgres/Dockerfile
      volumes:
        - local_postgres_data:/var/lib/postgresql/data
        - local_postgres_data_backups:/backups
      env_file:
        - ./.envs/.local/.postgres

    # ...

The most important thing for us here now is the ``env_file`` section enlisting ``./.envs/.local/.postgres``. Generally, the stack's behavior is governed by a number of environment variables (`env(s)`, for short) residing in ``.envs/``. For instance, this is what we generate for you: ::

    .envs
    ├── .local
    │   ├── .django
    │   └── .postgres
    └── .production
        ├── .django
        └── .postgres

By convention, for any service ``sI`` in environment ``e`` (you know ``someenv`` is an environment when there is a ``someenv.yml`` file in the project root), given ``sI`` requires configuration, a ``.envs/.e/.sI`` `service configuration` file exists.

Consider the aforementioned ``.envs/.local/.postgres``: ::

    # PostgreSQL
    # ------------------------------------------------------------------------------
    POSTGRES_HOST=postgres
    POSTGRES_DB=<your project slug>
    POSTGRES_USER=XgOWtQtJecsAbaIyslwGvFvPawftNaqO
    POSTGRES_PASSWORD=jSljDz4whHuwO3aJIgVBrqEml5Ycbghorep4uVJ4xjDYQu0LfuTZdctj7y0YcCLu

The three envs we are presented with here are ``POSTGRES_DB``, ``POSTGRES_USER``, and ``POSTGRES_PASSWORD`` (by the way, their values have also been generated for you). You might have figured out already where these definitions will end up; it's all the same with the ``django`` service container envs.

One final touch: should you ever need to merge ``.envs/.production/*`` into a single ``.env``, run ``merge_production_dotenvs_in_dotenv.py``: ::

    $ python merge_production_dotenvs_in_dotenv.py

The ``.env`` file will then be created, with all your production envs residing beside each other.

Tips & Tricks
-------------

Activate a Docker Machine
~~~~~~~~~~~~~~~~~~~~~~~~~

This tells our computer that all future commands are specifically for the dev1 machine. Using the ``eval`` command we can switch machines as needed. ::

    $ eval "$(docker-machine env dev1)"

Debugging
~~~~~~~~~

ipdb
"""""

If you are using the following within your code to debug: ::

    import ipdb; ipdb.set_trace()

Then you may need to run the following for it to work as desired: ::

    $ docker-compose -f local.yml run --rm --service-ports django


django-debug-toolbar
""""""""""""""""""""

In order for ``django-debug-toolbar`` to work, designate your Docker Machine IP with ``INTERNAL_IPS`` in ``local.py``.


docker
""""""

The ``container_name`` from the yml file can be used to check on containers with docker commands, for example: ::

    $ docker logs <project_slug>_local_celeryworker
    $ docker top <project_slug>_local_celeryworker

Notice that the ``container_name`` is generated dynamically using your project slug as a prefix.

Mailhog
~~~~~~~

When developing locally you can go with MailHog_ for email testing, provided ``use_mailhog`` was set to ``y`` on setup. To proceed,

#. make sure the ``<project_slug>_local_mailhog`` container is up and running;

#. open up ``http://127.0.0.1:8025``.

.. _Mailhog: https://github.com/mailhog/MailHog/

.. _`CeleryTasks`:

Celery tasks in local development
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~

When not using Docker, Celery tasks are set to run in Eager mode, so that a full stack is not needed. When using Docker, the task scheduler will be used by default.

If you need tasks to be executed on the main thread during development, set ``CELERY_TASK_ALWAYS_EAGER = True`` in ``config/settings/local.py``.

Possible uses could be for testing, or ease of profiling with DJDT.

.. _`CeleryFlower`:

Celery Flower
~~~~~~~~~~~~~

`Flower`_ is a "real-time monitor and web admin for Celery distributed task queue".

Prerequisites:

* ``use_docker`` was set to ``y`` on project initialization;
* ``use_celery`` was set to ``y`` on project initialization.

By default, it's enabled both in local and production environments (``local.yml`` and ``production.yml`` Docker Compose configs, respectively) through a ``flower`` service. For added security, ``flower`` requires its clients to provide authentication credentials specified as the corresponding environments' ``.envs/.local/.django`` and ``.envs/.production/.django`` ``CELERY_FLOWER_USER`` and ``CELERY_FLOWER_PASSWORD`` environment variables. Check out ``localhost:5555`` and see for yourself.

.. _`Flower`: https://github.com/mher/flower

Developing locally with HTTPS
-----------------------------

Increasingly it is becoming necessary to develop software in a secure environment, so that there are very few changes when deploying to production. Recently Facebook changed their policies for apps/sites that use Facebook login, which requires the use of an HTTPS URL for the OAuth redirect URL. So if you want to use the ``users`` application with an OAuth provider such as Facebook, securing your communication to the local development environment will be necessary.

In order to create a secure environment, we need to have a trusted SSL certificate installed in our Docker application.

#. **Let's Encrypt**

   The official line from Let's Encrypt is:

       [For local development section] ... The best option: Generate your own certificate, either self-signed or signed by a local root, and trust it in your operating system's trust store. Then use that certificate in your local web server. See below for details.

   See `letsencrypt.org - certificates-for-localhost`_

   .. _`letsencrypt.org - certificates-for-localhost`: https://letsencrypt.org/docs/certificates-for-localhost/

#. **mkcert: Valid Https Certificates For Localhost**

   `mkcert`_ is a simple by design tool that hides all the arcane knowledge required to generate valid TLS certificates. It works for any hostname or IP, including localhost. It supports macOS, Linux, and Windows, and Firefox, Chrome and Java. It even works on mobile devices with a couple of manual steps.

   See https://blog.filippo.io/mkcert-valid-https-certificates-for-localhost/

   .. _`mkcert`: https://github.com/FiloSottile/mkcert/blob/master/README.md#supported-root-stores

After installing a trusted TLS certificate, configure your docker installation. We are going to configure an ``nginx`` reverse-proxy server. This makes sure that it does not interfere with our ``traefik`` configuration that is reserved for production environments.

These are the places that you should configure to secure your local environment.

certs
~~~~~

Take the certificates that you generated and place them in a folder called ``certs`` in the project's root folder. Assuming that you registered your local hostname as ``my-dev-env.local``, the certificates you will put in the folder should have the names ``my-dev-env.local.crt`` and ``my-dev-env.local.key``.

local.yml
~~~~~~~~~

#. Add the ``nginx-proxy`` service. ::

    ...

    nginx-proxy:
      image: jwilder/nginx-proxy:alpine
      container_name: nginx-proxy
      ports:
        - "80:80"
        - "443:443"
      volumes:
        - /var/run/docker.sock:/tmp/docker.sock:ro
        - ./certs:/etc/nginx/certs
      restart: always
      depends_on:
        - django

    ...

#. Link the ``nginx-proxy`` to ``django`` through environment variables.

   ``django`` already has an ``.env`` file connected to it. Add the following variables. You should do this especially if you are working with a team and you want to keep your local environment details to yourself.

   ::

     # HTTPS
     # ------------------------------------------------------------------------------
     VIRTUAL_HOST=my-dev-env.local
     VIRTUAL_PORT=8000

   The services run behind the reverse proxy.

config/settings/local.py
~~~~~~~~~~~~~~~~~~~~~~~~

You should allow the new hostname. ::

    ALLOWED_HOSTS = ["localhost", "0.0.0.0", "127.0.0.1", "my-dev-env.local"]

Rebuild your ``docker`` application. ::

    $ docker-compose -f local.yml up -d --build

Go to your browser and type in your URL bar ``https://my-dev-env.local``.

See `https with nginx`_ for more information on this configuration.

.. _`https with nginx`: https://codewithhugo.com/docker-compose-local-https/

.gitignore
~~~~~~~~~~

Add ``certs/*`` to the ``.gitignore`` file. This allows the folder to be included in the repo but its contents to be ignored.

*This configuration is for local development environments only. Do not use this for production, since you might expose your local* ``rootCA-key.pem``.
docs/developing-locally.rst (new file, 180 lines)
@ -0,0 +1,180 @@

Getting Up and Running Locally
==============================

.. index:: pip, virtualenv, PostgreSQL


Setting Up Development Environment
----------------------------------

Make sure to have the following on your host:

* Python 3.9
* PostgreSQL_
* Redis_, if using Celery
* Cookiecutter_

First things first.

#. Create a virtualenv: ::

    $ python3.9 -m venv <virtual env path>

#. Activate the virtualenv you have just created: ::

    $ source <virtual env path>/bin/activate

#. Install cookiecutter-django: ::

    $ cookiecutter gh:cookiecutter/cookiecutter-django

#. Install development requirements: ::

    $ cd <what you have entered as the project_slug at setup stage>
    $ pip install -r requirements/local.txt
    $ git init # A git repo is required for pre-commit to install
    $ pre-commit install

   .. note::

      The `pre-commit` hook exists in the generated project by default.
      For the details of `pre-commit`, follow the `pre-commit`_ site.

#. Create a new PostgreSQL database using createdb_: ::

    $ createdb --username=postgres <project_slug>

   ``project_slug`` is what you have entered as the project_slug at the setup stage.

   .. note::

      If this is the first time a database is created on your machine, you might need an
      `initial PostgreSQL set up`_ to allow local connections & set a password for
      the ``postgres`` user. The `postgres documentation`_ explains the syntax of the config file
      that you need to change.

#. Set the environment variables for your database(s): ::

    $ export DATABASE_URL=postgres://postgres:<password>@127.0.0.1:5432/<DB name given to createdb>
    # Optional: set broker URL if using Celery
    $ export CELERY_BROKER_URL=redis://localhost:6379/0

   .. note::

      Check out the :ref:`settings` page for a comprehensive list of the environment variables.

   .. seealso::

      To help set up your environment variables, you have a few options:

      * create an ``.env`` file in the root of your project and define all the variables you need in it.
        Then you just need to have ``DJANGO_READ_DOT_ENV_FILE=True`` on your machine and all the variables
        will be read.
      * use a local environment manager like `direnv`_

#. Apply migrations: ::

    $ python manage.py migrate

#. If you're running synchronously, see the application being served through the Django development server: ::

    $ python manage.py runserver 0.0.0.0:8000

   or if you're running asynchronously: ::

    $ uvicorn config.asgi:application --host 0.0.0.0 --reload

.. _PostgreSQL: https://www.postgresql.org/download/
.. _Redis: https://redis.io/download
.. _CookieCutter: https://github.com/cookiecutter/cookiecutter
.. _createdb: https://www.postgresql.org/docs/current/static/app-createdb.html
.. _initial PostgreSQL set up: https://web.archive.org/web/20190303010033/http://suite.opengeo.org/docs/latest/dataadmin/pgGettingStarted/firstconnect.html
.. _postgres documentation: https://www.postgresql.org/docs/current/static/auth-pg-hba-conf.html
.. _pre-commit: https://pre-commit.com/
.. _direnv: https://direnv.net/

Setup Email Backend
-------------------

MailHog
~~~~~~~

.. note:: In order for the project to support MailHog_ it must have been bootstrapped with ``use_mailhog`` set to ``y``.

MailHog is used to receive emails during development; it is written in Go and has no external dependencies.

For instance, one of the packages we depend upon, ``django-allauth``, sends verification emails to new users signing up as well as to the existing ones who have not yet verified themselves.

#. `Download the latest MailHog release`_ for your OS.

#. Rename the build to ``MailHog``.

#. Copy the file to the project root.

#. Make it executable: ::

    $ chmod +x MailHog

#. Spin up another terminal window and start it there: ::

    ./MailHog

#. Check out `<http://127.0.0.1:8025/>`_ to see how it goes.

Now you have your own mail server running locally, ready to receive whatever you send it.

.. _`Download the latest MailHog release`: https://github.com/mailhog/MailHog

Console
~~~~~~~

.. note:: If you have generated your project with ``use_mailhog`` set to ``n``, this will be the default setup.

Alternatively, deliver emails over the console via ``EMAIL_BACKEND = 'django.core.mail.backends.console.EmailBackend'``.

In production, we have Mailgun_ configured to have your back!

.. _Mailgun: https://www.mailgun.com/


Celery
------

If the project is configured to use Celery as a task scheduler, then by default tasks are set to run on the main thread
when developing locally. If you have the appropriate setup on your local machine, then set the following
in ``config/settings/local.py``::

    CELERY_TASK_ALWAYS_EAGER = False

To run Celery locally, make sure redis-server is installed (instructions are available at https://redis.io/topics/quickstart), run the server in one terminal with ``redis-server``, and then start celery in another terminal with the following command::

    celery -A config.celery_app worker --loglevel=info


Sass Compilation & Live Reloading
---------------------------------

If you've opted for Gulp as front-end pipeline, the project comes configured with `Sass`_ compilation and `live reloading`_. As you change your Sass/JS source files, the task runner will automatically rebuild the corresponding CSS and JS assets and reload them in your browser without refreshing the page.

#. Make sure that `Node.js`_ v16 is installed on your machine.
#. In the project root, install the JS dependencies with::

    $ npm install

#. Now - with your virtualenv activated - start the application by running::

    $ npm run dev

The app will now run with live reloading enabled, applying front-end changes dynamically.

.. note:: The task will start 2 processes in parallel: the static assets build loop on one side, and the Django server on the other. You do NOT need to run Django as you would normally with ``manage.py runserver``.

.. _Node.js: http://nodejs.org/download/
.. _Sass: https://sass-lang.com/
.. _live reloading: https://browsersync.io

Summary
-------

Congratulations, you have made it! Keep on reading to unleash the full potential of Cookiecutter Django.

@ -1,14 +1,14 @@
 PostgreSQL Backups with Docker
 ==============================

-.. note:: For brevity it is assumed that you will be running the below commands against the local environment; however, this is by no means mandatory, so feel free to switch to ``docker-compose.production.yml`` when needed.
+.. note:: For brevity it is assumed that you will be running the below commands against the local environment; however, this is by no means mandatory, so feel free to switch to ``production.yml`` when needed.


 Prerequisites
 -------------

 #. the project was generated with ``use_docker`` set to ``y``;
-#. the stack is up and running: ``docker compose -f docker-compose.local.yml up -d postgres``.
+#. the stack is up and running: ``docker-compose -f local.yml up -d postgres``.


 Creating a Backup

@ -16,7 +16,7 @@ Creating a Backup

 To create a backup, run::

-    $ docker compose -f docker-compose.local.yml exec postgres backup
+    $ docker-compose -f local.yml exec postgres backup

 Assuming your project's database is named ``my_project`` here is what you will see: ::

@ -31,7 +31,7 @@ Viewing the Existing Backups

 To list existing backups, ::

-    $ docker compose -f docker-compose.local.yml exec postgres backups
+    $ docker-compose -f local.yml exec postgres backups

 These are the sample contents of ``/backups``: ::

@ -55,9 +55,9 @@ With a single backup file copied to ``.`` that would be ::

     $ docker cp 9c5c3f055843:/backups/backup_2018_03_13T09_05_07.sql.gz .

-You can also get the container ID using ``docker compose -f docker-compose.local.yml ps -q postgres`` so if you want to automate your backups, you don't have to check the container ID manually every time. Here is the full command ::
+You can also get the container ID using ``docker-compose -f local.yml ps -q postgres`` so if you want to automate your backups, you don't have to check the container ID manually every time. Here is the full command ::

-    $ docker cp $(docker compose -f docker-compose.local.yml ps -q postgres):/backups ./backups
+    $ docker cp $(docker-compose -f local.yml ps -q postgres):/backups ./backups

 .. _`command`: https://docs.docker.com/engine/reference/commandline/cp/

@ -66,7 +66,7 @@ Restoring from the Existing Backup

 To restore from one of the backups you have already got (take the ``backup_2018_03_13T09_05_07.sql.gz`` for example), ::

-    $ docker compose -f docker-compose.local.yml exec postgres restore backup_2018_03_13T09_05_07.sql.gz
+    $ docker-compose -f local.yml exec postgres restore backup_2018_03_13T09_05_07.sql.gz

 You will see something like ::

@ -92,36 +92,7 @@ You will see something like ::

 Backup to Amazon S3
 ----------------------------------

 For uploading your backups to Amazon S3 you can use the aws cli container. There is an upload command for uploading the postgres /backups directory recursively and there is a download command for downloading a specific backup. The default S3 environment variables are used. ::

-    $ docker compose -f docker-compose.production.yml run --rm awscli upload
-    $ docker compose -f docker-compose.production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
+    $ docker-compose -f production.yml run --rm awscli upload
+    $ docker-compose -f production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz

-Remove Backup
-----------------------------------
-
-To remove a backup you can use the ``rmbackup`` command. This will remove the backup from the ``/backups`` directory. ::
-
-    $ docker compose -f docker-compose.local.yml exec postgres rmbackup backup_2018_03_13T09_05_07.sql.gz
-
-
-Upgrading PostgreSQL
-----------------------------------
-
-Upgrading PostgreSQL in your project requires a series of carefully executed steps. Start by halting all containers, excluding the postgres container. Following this, create a backup and proceed to remove the outdated data volume. ::
-
-    $ docker compose -f docker-compose.local.yml down
-    $ docker compose -f docker-compose.local.yml up -d postgres
-    $ docker compose -f docker-compose.local.yml run --rm postgres backup
-    $ docker compose -f docker-compose.local.yml down
-    $ docker volume rm my_project_postgres_data
-
-.. note:: Neglecting to remove the old data volume may lead to issues, such as the new postgres container failing to start with errors like ``FATAL: database files are incompatible with server``, and ``could not translate host name "postgres" to address: Name or service not known``.
-
-To complete the upgrade, update the PostgreSQL version in the corresponding Dockerfile (e.g. ``compose/production/postgres/Dockerfile``) and build a new version of PostgreSQL. ::
-
-    $ docker compose -f docker-compose.local.yml build postgres
-    $ docker compose -f docker-compose.local.yml up -d postgres
-    $ docker compose -f docker-compose.local.yml run --rm postgres restore backup_2018_03_13T09_05_07.sql.gz
-    $ docker compose -f docker-compose.local.yml up -d
@ -11,7 +11,7 @@ After you have set up to `develop locally`_, run the following command from the

 If you set up your project to `develop locally with docker`_, run the following command: ::

-    $ docker compose -f docker-compose.docs.yml up
+    $ docker-compose -f local.yml up docs

 Navigate to port 9000 on your host to see the documentation. This will be opened automatically at `localhost`_ for local, non-docker development.

@ -6,7 +6,7 @@ FAQ

 Why is there a django.contrib.sites directory in Cookiecutter Django?
 ---------------------------------------------------------------------

-It is there to add a migration so you don't have to manually change the ``sites.Site`` record from ``example.com`` to whatever your domain is. Instead, your ``{{cookiecutter.domain_name}}`` and ``{{cookiecutter.project_name}}`` value is placed by **Cookiecutter** in the domain and name fields respectively.
+It is there to add a migration so you don't have to manually change the ``sites.Site`` record from ``example.com`` to whatever your domain is. Instead, your ``{{cookiecutter.domain_name}}`` and {{cookiecutter.project_name}} value is placed by **Cookiecutter** in the domain and name fields respectively.

 See `0003_set_site_domain_and_name.py`_.

@ -22,6 +22,6 @@ TODO

 Why doesn't this follow the layout from Two Scoops of Django?
 -------------------------------------------------------------

-You may notice that some elements of this project do not exactly match what we describe in chapter 3 of `Two Scoops of Django 3.x`_. The reason for that is this project, amongst other things, serves as a test bed for trying out new ideas and concepts. Sometimes they work, sometimes they don't, but the end result is that it won't necessarily match precisely what is described in the book I co-authored.
+You may notice that some elements of this project do not exactly match what we describe in chapter 3 of `Two Scoops of Django 1.11`_. The reason for that is this project, amongst other things, serves as a test bed for trying out new ideas and concepts. Sometimes they work, sometimes they don't, but the end result is that it won't necessarily match precisely what is described in the book I co-authored.

-.. _Two Scoops of Django 3.x: https://www.feldroy.com/two-scoops-press#two-scoops-of-django
+.. _Two Scoops of Django 1.11: https://www.feldroy.com/collections/django/products/two-scoops-of-django-1-11
@ -7,51 +7,26 @@ Powered by Cookiecutter_, Cookiecutter Django is a project template for jumpstar
|
||||||
|
|
||||||
.. _cookiecutter: https://github.com/cookiecutter/cookiecutter
|
.. _cookiecutter: https://github.com/cookiecutter/cookiecutter
|
||||||
|
|
||||||
.. toctree::
|
Contents
|
||||||
:maxdepth: 2
|
--------
|
||||||
:caption: Getting Started
|
|
||||||
|
|
||||||
1-getting-started/project-generation-options
|
|
||||||
1-getting-started/settings
|
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 2
|
:maxdepth: 2
|
||||||
:caption: Local Development
|
|
||||||
|
|
||||||
2-local-development/developing-locally
|
project-generation-options
|
||||||
2-local-development/developing-locally-docker
|
developing-locally
|
||||||
|
developing-locally-docker
|
||||||
.. toctree::
|
settings
|
||||||
:maxdepth: 2
|
linters
|
||||||
:caption: Deployment
|
testing
|
||||||
|
document
|
||||||
3-deployment/deployment-on-pythonanywhere
|
deployment-on-pythonanywhere
|
||||||
3-deployment/deployment-on-heroku
|
deployment-on-heroku
|
||||||
3-deployment/deployment-with-docker
|
deployment-with-docker
|
||||||
|
docker-postgres-backups
|
||||||
.. toctree::
|
websocket
|
||||||
:maxdepth: 2
|
faq
|
||||||
:caption: Guides
|
troubleshooting
|
||||||
|
|
||||||
4-guides/docker-postgres-backups
|
|
||||||
4-guides/linters
|
|
||||||
4-guides/testing
|
|
||||||
4-guides/document
|
|
||||||
4-guides/websocket
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 2
|
|
||||||
:caption: Help
|
|
||||||
|
|
||||||
5-help/faq
|
|
||||||
5-help/troubleshooting
|
|
||||||
|
|
||||||
.. toctree::
|
|
||||||
:maxdepth: 2
|
|
||||||
:caption: About
|
|
||||||
|
|
||||||
6-about/contributing
|
|
||||||
6-about/maintainer-guide
|
|
||||||
|
|
||||||
Indices and tables
|
Indices and tables
|
||||||
------------------
|
------------------
|
||||||
|
|
43
docs/linters.rst
Normal file
|
@ -0,0 +1,43 @@
|
||||||
|
Linters
|
||||||
|
=======
|
||||||
|
|
||||||
|
.. index:: linters
|
||||||
|
|
||||||
|
|
||||||
|
flake8
|
||||||
|
------
|
||||||
|
|
||||||
|
To run flake8: ::
|
||||||
|
|
||||||
|
$ flake8
|
||||||
|
|
||||||
|
The config for flake8 is located in setup.cfg. It specifies:
|
||||||
|
|
||||||
|
* Set max line length to 120 chars
|
||||||
|
* Exclude ``.tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules``
|
||||||
|
|
||||||
|
pylint
|
||||||
|
------
|
||||||
|
|
||||||
|
To run pylint: ::
|
||||||
|
|
||||||
|
$ pylint <python files that you wish to lint>
|
||||||
|
|
||||||
|
The config for pylint is located in .pylintrc. It specifies:
|
||||||
|
|
||||||
|
* Use the pylint_django plugin. If using Celery, also use pylint_celery.
|
||||||
|
* Set max line length to 120 chars
|
||||||
|
* Disable linting messages for missing docstring and invalid name
|
||||||
|
* max-parents=13
|
||||||
|
|
||||||
|
pycodestyle
|
||||||
|
-----------
|
||||||
|
|
||||||
|
This is included in flake8's checks, but you can also run it separately to see a more detailed report: ::
|
||||||
|
|
||||||
|
$ pycodestyle <python files that you wish to lint>
|
||||||
|
|
||||||
|
The config for pycodestyle is located in setup.cfg. It specifies:
|
||||||
|
|
||||||
|
* Set max line length to 120 chars
|
||||||
|
* Exclude ``.tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules``
|
153
docs/make.bat
|
@ -5,20 +5,36 @@ REM Command file for Sphinx documentation
|
||||||
if "%SPHINXBUILD%" == "" (
|
if "%SPHINXBUILD%" == "" (
|
||||||
set SPHINXBUILD=sphinx-build
|
set SPHINXBUILD=sphinx-build
|
||||||
)
|
)
|
||||||
set SOURCEDIR=.
|
|
||||||
set BUILDDIR=_build
|
set BUILDDIR=_build
|
||||||
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS%
|
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
|
||||||
set I18NSPHINXOPTS=%SPHINXOPTS% .
|
set I18NSPHINXOPTS=%SPHINXOPTS% .
|
||||||
|
if NOT "%PAPER%" == "" (
|
||||||
|
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
|
||||||
|
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
|
||||||
|
)
|
||||||
|
|
||||||
if "%1" == "" goto help
|
if "%1" == "" goto help
|
||||||
|
|
||||||
if "%1" == "help" (
|
if "%1" == "help" (
|
||||||
:help
|
:help
|
||||||
echo.Please use `make ^<target^>` where ^<target^> is one of
|
echo.Please use `make ^<target^>` where ^<target^> is one of
|
||||||
echo. clean to clean the build directory
|
|
||||||
echo. html to make standalone HTML files
|
echo. html to make standalone HTML files
|
||||||
echo. livehtml to build and serve docs with live reload
|
echo. dirhtml to make HTML files named index.html in directories
|
||||||
|
echo. singlehtml to make a single large HTML file
|
||||||
|
echo. pickle to make pickle files
|
||||||
|
echo. json to make JSON files
|
||||||
|
echo. htmlhelp to make HTML files and a HTML help project
|
||||||
|
echo. qthelp to make HTML files and a qthelp project
|
||||||
|
echo. devhelp to make HTML files and a Devhelp project
|
||||||
|
echo. epub to make an epub
|
||||||
|
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
|
||||||
|
echo. text to make text files
|
||||||
|
echo. man to make manual pages
|
||||||
|
echo. texinfo to make Texinfo files
|
||||||
|
echo. gettext to make PO message catalogs
|
||||||
|
echo. changes to make an overview over all changed/added/deprecated items
|
||||||
echo. linkcheck to check all external links for integrity
|
echo. linkcheck to check all external links for integrity
|
||||||
|
echo. doctest to run all doctests embedded in the documentation if enabled
|
||||||
goto end
|
goto end
|
||||||
)
|
)
|
||||||
|
|
||||||
|
@ -29,22 +45,132 @@ if "%1" == "clean" (
|
||||||
)
|
)
|
||||||
|
|
||||||
if "%1" == "html" (
|
if "%1" == "html" (
|
||||||
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %SOURCEDIR% %BUILDDIR%/html
|
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
|
||||||
if errorlevel 1 exit /b 1
|
if errorlevel 1 exit /b 1
|
||||||
echo.
|
echo.
|
||||||
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
|
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
|
||||||
goto end
|
goto end
|
||||||
)
|
)
|
||||||
|
|
||||||
if "%1" == "livehtml" (
|
if "%1" == "dirhtml" (
|
||||||
sphinx-autobuild -b html --port 9000 --watch . -c . %SOURCEDIR% %BUILDDIR%/html
|
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
|
||||||
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
|
|
||||||
if errorlevel 1 exit /b 1
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "singlehtml" (
|
||||||
|
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "pickle" (
|
||||||
|
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Build finished; now you can process the pickle files.
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "json" (
|
||||||
|
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Build finished; now you can process the JSON files.
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "htmlhelp" (
|
||||||
|
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Build finished; now you can run HTML Help Workshop with the ^
|
||||||
|
.hhp project file in %BUILDDIR%/htmlhelp.
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "qthelp" (
|
||||||
|
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Build finished; now you can run "qcollectiongenerator" with the ^
|
||||||
|
.qhcp project file in %BUILDDIR%/qthelp, like this:
|
||||||
|
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\{{ cookiecutter.project_slug }}.qhcp
|
||||||
|
echo.To view the help file:
|
||||||
|
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\{{ cookiecutter.project_slug }}.ghc
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "devhelp" (
|
||||||
|
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Build finished.
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "epub" (
|
||||||
|
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Build finished. The epub file is in %BUILDDIR%/epub.
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "latex" (
|
||||||
|
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "text" (
|
||||||
|
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Build finished. The text files are in %BUILDDIR%/text.
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "man" (
|
||||||
|
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Build finished. The manual pages are in %BUILDDIR%/man.
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "texinfo" (
|
||||||
|
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "gettext" (
|
||||||
|
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
|
if "%1" == "changes" (
|
||||||
|
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.The overview file is in %BUILDDIR%/changes.
|
||||||
goto end
|
goto end
|
||||||
)
|
)
|
||||||
|
|
||||||
if "%1" == "linkcheck" (
|
if "%1" == "linkcheck" (
|
||||||
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %SOURCEDIR% %BUILDDIR%/linkcheck
|
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
|
||||||
if errorlevel 1 exit /b 1
|
if errorlevel 1 exit /b 1
|
||||||
echo.
|
echo.
|
||||||
echo.Link check complete; look for any errors in the above output ^
|
echo.Link check complete; look for any errors in the above output ^
|
||||||
|
@ -52,4 +178,13 @@ or in %BUILDDIR%/linkcheck/output.txt.
|
||||||
goto end
|
goto end
|
||||||
)
|
)
|
||||||
|
|
||||||
|
if "%1" == "doctest" (
|
||||||
|
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
|
||||||
|
if errorlevel 1 exit /b 1
|
||||||
|
echo.
|
||||||
|
echo.Testing of doctests in the sources finished, look at the ^
|
||||||
|
results in %BUILDDIR%/doctest/output.txt.
|
||||||
|
goto end
|
||||||
|
)
|
||||||
|
|
||||||
:end
|
:end
|
||||||
|
|
|
@ -24,13 +24,6 @@ author_name:
|
||||||
email:
|
email:
|
||||||
The email address with which you want to identify yourself in the project.
|
The email address with which you want to identify yourself in the project.
|
||||||
|
|
||||||
username_type:
|
|
||||||
The type of username you want to use in the project. This can be either
|
|
||||||
``username`` or ``email``. If you choose ``username``, the ``email`` field
|
|
||||||
will be included. If you choose ``email``, the ``username`` field will be
|
|
||||||
excluded. It is best practice to always include an email field, so there is
|
|
||||||
no option for having just the ``username`` field.
|
|
||||||
|
|
||||||
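As a rough illustration of the ``email`` choice — a sketch, not the template's exact code (the generated project pairs this with a custom manager in ``users/managers.py``): ::

    from django.contrib.auth.models import AbstractUser
    from django.db import models


    class User(AbstractUser):
        # Sketch: drop the username column and sign users in by email instead.
        username = None
        email = models.EmailField("email address", unique=True)

        USERNAME_FIELD = "email"
        REQUIRED_FIELDS = []  # createsuperuser then only prompts for email and password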
domain_name:
|
domain_name:
|
||||||
The domain name you plan to use for your project once it goes live.
|
The domain name you plan to use for your project once it goes live.
|
||||||
Note that it can be safely changed later on whenever you need to.
|
Note that it can be safely changed later on whenever you need to.
|
||||||
|
@ -53,34 +46,29 @@ timezone:
|
||||||
windows:
|
windows:
|
||||||
Indicates whether the project should be configured for development on Windows.
|
Indicates whether the project should be configured for development on Windows.
|
||||||
|
|
||||||
editor:
|
use_pycharm:
|
||||||
Select an editor to use. The choices are:
|
Indicates whether the project should be configured for development with PyCharm_.
|
||||||
|
|
||||||
1. None
|
|
||||||
2. PyCharm_
|
|
||||||
3. `VS Code`_
|
|
||||||
|
|
||||||
use_docker:
|
use_docker:
|
||||||
Indicates whether the project should be configured to use Docker_, `Docker Compose`_ and `devcontainer`_.
|
Indicates whether the project should be configured to use Docker_ and `Docker Compose`_.
|
||||||
|
|
||||||
postgresql_version:
|
postgresql_version:
|
||||||
Select a PostgreSQL_ version to use. The choices are:
|
Select a PostgreSQL_ version to use. The choices are:
|
||||||
|
|
||||||
1. 17
|
1. 14.1
|
||||||
2. 16
|
2. 13.5
|
||||||
3. 15
|
3. 12.9
|
||||||
4. 14
|
4. 11.14
|
||||||
5. 13
|
5. 10.19
|
||||||
|
|
||||||
cloud_provider:
|
cloud_provider:
|
||||||
Select a cloud provider for static & media files. The choices are:
|
Select a cloud provider for static & media files. The choices are:
|
||||||
|
|
||||||
1. AWS_
|
1. AWS_
|
||||||
2. GCP_
|
2. GCP_
|
||||||
3. Azure_
|
3. None
|
||||||
4. None
|
|
||||||
|
|
||||||
If you choose no cloud provider but use Docker, the production stack will serve the media files via an nginx Docker service. Without Docker, the media files won't be served.
|
Note that if you choose no cloud provider, media files won't work.
|
||||||
|
|
||||||
mail_service:
|
mail_service:
|
||||||
Select an email service for which Django-Anymail provides an integration
|
Select an email service for which Django-Anymail provides an integration
|
||||||
|
@ -91,7 +79,7 @@ mail_service:
|
||||||
4. Mandrill_
|
4. Mandrill_
|
||||||
5. Postmark_
|
5. Postmark_
|
||||||
6. SendGrid_
|
6. SendGrid_
|
||||||
7. `Brevo (formerly SendinBlue)`_
|
7. SendinBlue_
|
||||||
8. SparkPost_
|
8. SparkPost_
|
||||||
9. `Other SMTP`_
|
9. `Other SMTP`_
|
||||||
|
|
||||||
|
@ -106,16 +94,13 @@ frontend_pipeline:
|
||||||
|
|
||||||
1. None
|
1. None
|
||||||
2. `Django Compressor`_
|
2. `Django Compressor`_
|
||||||
3. `Gulp`_
|
3. `Gulp`_: support Bootstrap recompilation with real-time variables alteration.
|
||||||
4. `Webpack`_
|
|
||||||
|
|
||||||
Both Gulp and Webpack support Bootstrap recompilation with real-time variable alteration.
|
|
||||||
|
|
||||||
use_celery:
|
use_celery:
|
||||||
Indicates whether the project should be configured to use Celery_.
|
Indicates whether the project should be configured to use Celery_.
|
||||||
|
|
||||||
use_mailpit:
|
use_mailhog:
|
||||||
Indicates whether the project should be configured to use Mailpit_.
|
Indicates whether the project should be configured to use MailHog_.
|
||||||
|
|
||||||
use_sentry:
|
use_sentry:
|
||||||
Indicates whether the project should be configured to use Sentry_.
|
Indicates whether the project should be configured to use Sentry_.
|
||||||
|
@ -134,7 +119,6 @@ ci_tool:
|
||||||
2. `Travis CI`_
|
2. `Travis CI`_
|
||||||
3. `Gitlab CI`_
|
3. `Gitlab CI`_
|
||||||
4. `Github Actions`_
|
4. `Github Actions`_
|
||||||
5. `Drone CI`_
|
|
||||||
|
|
||||||
keep_local_envs_in_vcs:
|
keep_local_envs_in_vcs:
|
||||||
Indicates whether the project's ``.envs/.local/`` should be kept in VCS
|
Indicates whether the project's ``.envs/.local/`` should be kept in VCS
|
||||||
|
@ -153,20 +137,16 @@ debug:
|
||||||
.. _Apache Software License 2.0: http://www.apache.org/licenses/LICENSE-2.0
|
.. _Apache Software License 2.0: http://www.apache.org/licenses/LICENSE-2.0
|
||||||
|
|
||||||
.. _PyCharm: https://www.jetbrains.com/pycharm/
|
.. _PyCharm: https://www.jetbrains.com/pycharm/
|
||||||
.. _VS Code: https://github.com/microsoft/vscode
|
|
||||||
|
|
||||||
.. _Docker: https://github.com/docker/docker
|
.. _Docker: https://github.com/docker/docker
|
||||||
.. _Docker Compose: https://docs.docker.com/compose/
|
.. _Docker Compose: https://docs.docker.com/compose/
|
||||||
.. _devcontainer: https://containers.dev/
|
|
||||||
|
|
||||||
.. _PostgreSQL: https://www.postgresql.org/docs/
|
.. _PostgreSQL: https://www.postgresql.org/docs/
|
||||||
|
|
||||||
.. _Gulp: https://github.com/gulpjs/gulp
|
.. _Gulp: https://github.com/gulpjs/gulp
|
||||||
.. _Webpack: https://webpack.js.org
|
|
||||||
|
|
||||||
.. _AWS: https://aws.amazon.com/s3/
|
.. _AWS: https://aws.amazon.com/s3/
|
||||||
.. _GCP: https://cloud.google.com/storage/
|
.. _GCP: https://cloud.google.com/storage/
|
||||||
.. _Azure: https://azure.microsoft.com/en-us/products/storage/blobs/
|
|
||||||
|
|
||||||
.. _Amazon SES: https://aws.amazon.com/ses/
|
.. _Amazon SES: https://aws.amazon.com/ses/
|
||||||
.. _Mailgun: https://www.mailgun.com
|
.. _Mailgun: https://www.mailgun.com
|
||||||
|
@ -174,7 +154,7 @@ debug:
|
||||||
.. _Mandrill: http://mandrill.com
|
.. _Mandrill: http://mandrill.com
|
||||||
.. _Postmark: https://postmarkapp.com
|
.. _Postmark: https://postmarkapp.com
|
||||||
.. _SendGrid: https://sendgrid.com
|
.. _SendGrid: https://sendgrid.com
|
||||||
.. _Brevo (formerly SendinBlue): https://www.brevo.com
|
.. _SendinBlue: https://www.sendinblue.com
|
||||||
.. _SparkPost: https://www.sparkpost.com
|
.. _SparkPost: https://www.sparkpost.com
|
||||||
.. _Other SMTP: https://anymail.readthedocs.io/en/stable/
|
.. _Other SMTP: https://anymail.readthedocs.io/en/stable/
|
||||||
|
|
||||||
|
@ -184,7 +164,7 @@ debug:
|
||||||
|
|
||||||
.. _Celery: https://github.com/celery/celery
|
.. _Celery: https://github.com/celery/celery
|
||||||
|
|
||||||
.. _Mailpit: https://github.com/axllent/mailpit
|
.. _MailHog: https://github.com/mailhog/MailHog
|
||||||
|
|
||||||
.. _Sentry: https://github.com/getsentry/sentry
|
.. _Sentry: https://github.com/getsentry/sentry
|
||||||
|
|
||||||
|
@ -196,6 +176,4 @@ debug:
|
||||||
|
|
||||||
.. _GitLab CI: https://docs.gitlab.com/ee/ci/
|
.. _GitLab CI: https://docs.gitlab.com/ee/ci/
|
||||||
|
|
||||||
.. _Drone CI: https://docs.drone.io/pipeline/overview/
|
|
||||||
|
|
||||||
.. _Github Actions: https://docs.github.com/en/actions
|
.. _Github Actions: https://docs.github.com/en/actions
|
2
docs/requirements.txt
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
sphinx==4.5.0
|
||||||
|
sphinx-rtd-theme==1.0.0
|
|
@ -22,6 +22,7 @@ DATABASE_URL DATABASES auto w/ Dock
|
||||||
DJANGO_ADMIN_URL n/a 'admin/' raises error
|
DJANGO_ADMIN_URL n/a 'admin/' raises error
|
||||||
DJANGO_DEBUG DEBUG True False
|
DJANGO_DEBUG DEBUG True False
|
||||||
DJANGO_SECRET_KEY SECRET_KEY auto-generated raises error
|
DJANGO_SECRET_KEY SECRET_KEY auto-generated raises error
|
||||||
|
DJANGO_SECURE_BROWSER_XSS_FILTER SECURE_BROWSER_XSS_FILTER n/a True
|
||||||
DJANGO_SECURE_SSL_REDIRECT SECURE_SSL_REDIRECT n/a True
|
DJANGO_SECURE_SSL_REDIRECT SECURE_SSL_REDIRECT n/a True
|
||||||
DJANGO_SECURE_CONTENT_TYPE_NOSNIFF SECURE_CONTENT_TYPE_NOSNIFF n/a True
|
DJANGO_SECURE_CONTENT_TYPE_NOSNIFF SECURE_CONTENT_TYPE_NOSNIFF n/a True
|
||||||
DJANGO_SECURE_FRAME_DENY SECURE_FRAME_DENY n/a True
|
DJANGO_SECURE_FRAME_DENY SECURE_FRAME_DENY n/a True
|
||||||
|
@ -39,17 +40,14 @@ The following table lists settings and their defaults for third-party applicatio
|
||||||
======================================= =========================== ============================================== ======================================================================
|
======================================= =========================== ============================================== ======================================================================
|
||||||
Environment Variable Django Setting Development Default Production Default
|
Environment Variable Django Setting Development Default Production Default
|
||||||
======================================= =========================== ============================================== ======================================================================
|
======================================= =========================== ============================================== ======================================================================
|
||||||
|
CELERY_BROKER_URL CELERY_BROKER_URL auto w/ Docker; raises error w/o raises error
|
||||||
DJANGO_AWS_ACCESS_KEY_ID AWS_ACCESS_KEY_ID n/a raises error
|
DJANGO_AWS_ACCESS_KEY_ID AWS_ACCESS_KEY_ID n/a raises error
|
||||||
DJANGO_AWS_SECRET_ACCESS_KEY AWS_SECRET_ACCESS_KEY n/a raises error
|
DJANGO_AWS_SECRET_ACCESS_KEY AWS_SECRET_ACCESS_KEY n/a raises error
|
||||||
DJANGO_AWS_STORAGE_BUCKET_NAME AWS_STORAGE_BUCKET_NAME n/a raises error
|
DJANGO_AWS_STORAGE_BUCKET_NAME AWS_STORAGE_BUCKET_NAME n/a raises error
|
||||||
DJANGO_AWS_S3_REGION_NAME AWS_S3_REGION_NAME n/a None
|
DJANGO_AWS_S3_REGION_NAME AWS_S3_REGION_NAME n/a None
|
||||||
DJANGO_AWS_S3_CUSTOM_DOMAIN AWS_S3_CUSTOM_DOMAIN n/a None
|
DJANGO_AWS_S3_CUSTOM_DOMAIN AWS_S3_CUSTOM_DOMAIN n/a None
|
||||||
DJANGO_AWS_S3_MAX_MEMORY_SIZE AWS_S3_MAX_MEMORY_SIZE n/a 100_000_000
|
|
||||||
DJANGO_GCP_STORAGE_BUCKET_NAME GS_BUCKET_NAME n/a raises error
|
DJANGO_GCP_STORAGE_BUCKET_NAME GS_BUCKET_NAME n/a raises error
|
||||||
GOOGLE_APPLICATION_CREDENTIALS n/a n/a raises error
|
GOOGLE_APPLICATION_CREDENTIALS n/a n/a raises error
|
||||||
DJANGO_AZURE_ACCOUNT_KEY AZURE_ACCOUNT_KEY n/a raises error
|
|
||||||
DJANGO_AZURE_ACCOUNT_NAME AZURE_ACCOUNT_NAME n/a raises error
|
|
||||||
DJANGO_AZURE_CONTAINER_NAME AZURE_CONTAINER n/a raises error
|
|
||||||
SENTRY_DSN SENTRY_DSN n/a raises error
|
SENTRY_DSN SENTRY_DSN n/a raises error
|
||||||
SENTRY_ENVIRONMENT n/a n/a production
|
SENTRY_ENVIRONMENT n/a n/a production
|
||||||
SENTRY_TRACES_SAMPLE_RATE n/a n/a 0.0
|
SENTRY_TRACES_SAMPLE_RATE n/a n/a 0.0
|
||||||
|
@ -68,8 +66,8 @@ SENDGRID_API_KEY SENDGRID_API_KEY n/a
|
||||||
SENDGRID_GENERATE_MESSAGE_ID True n/a raises error
|
SENDGRID_GENERATE_MESSAGE_ID True n/a raises error
|
||||||
SENDGRID_MERGE_FIELD_FORMAT None n/a raises error
|
SENDGRID_MERGE_FIELD_FORMAT None n/a raises error
|
||||||
SENDGRID_API_URL n/a n/a "https://api.sendgrid.com/v3/"
|
SENDGRID_API_URL n/a n/a "https://api.sendgrid.com/v3/"
|
||||||
BREVO_API_KEY BREVO_API_KEY n/a raises error
|
SENDINBLUE_API_KEY SENDINBLUE_API_KEY n/a raises error
|
||||||
BREVO_API_URL n/a n/a "https://api.brevo.com/v3/"
|
SENDINBLUE_API_URL n/a n/a "https://api.sendinblue.com/v3/"
|
||||||
SPARKPOST_API_KEY SPARKPOST_API_KEY n/a raises error
|
SPARKPOST_API_KEY SPARKPOST_API_KEY n/a raises error
|
||||||
SPARKPOST_API_URL n/a n/a "https://api.sparkpost.com/api/v1"
|
SPARKPOST_API_URL n/a n/a "https://api.sparkpost.com/api/v1"
|
||||||
======================================= =========================== ============================================== ======================================================================
|
======================================= =========================== ============================================== ======================================================================
|
||||||
|
@ -80,6 +78,3 @@ Other Environment Settings
|
||||||
|
|
||||||
DJANGO_ACCOUNT_ALLOW_REGISTRATION (=True)
|
DJANGO_ACCOUNT_ALLOW_REGISTRATION (=True)
|
||||||
Allows enabling or disabling user registration through `django-allauth` without disabling other features such as authentication and account management. (Django Setting: ACCOUNT_ALLOW_REGISTRATION)
|
Allows enabling or disabling user registration through `django-allauth` without disabling other features such as authentication and account management. (Django Setting: ACCOUNT_ALLOW_REGISTRATION)
|
||||||
|
|
||||||
DJANGO_ADMIN_FORCE_ALLAUTH (=False)
|
|
||||||
Force the `admin` sign in process to go through the `django-allauth` workflow.
|
|
|
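Conceptually, these two flags are read in the settings along these lines — a minimal sketch assuming ``django-environ``, which the template uses (setting names as in this section): ::

    import environ

    env = environ.Env()

    # Registration stays open by default; set the env var to "False" to close it.
    ACCOUNT_ALLOW_REGISTRATION = env.bool("DJANGO_ACCOUNT_ALLOW_REGISTRATION", default=True)
    # Opt-in: route admin sign-in through the django-allauth workflow.
    ADMIN_FORCE_ALLAUTH = env.bool("DJANGO_ADMIN_FORCE_ALLAUTH", default=False)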
@ -19,7 +19,7 @@ You will get a readout of the `users` app that has already been set up with test
|
||||||
|
|
||||||
If you set up your project to `develop locally with docker`_, run the following command: ::
|
If you set up your project to `develop locally with docker`_, run the following command: ::
|
||||||
|
|
||||||
$ docker compose -f docker-compose.local.yml run --rm django pytest
|
$ docker-compose -f local.yml run --rm django pytest
|
||||||
|
|
||||||
Targeting particular apps for testing in ``docker`` follows a similar pattern as previously shown above.
|
Targeting particular apps for testing in ``docker`` follows a similar pattern as previously shown above.
|
||||||
|
|
||||||
|
@ -28,22 +28,17 @@ Coverage
|
||||||
|
|
||||||
You should build your tests to provide the highest level of **code coverage**. You can run ``pytest`` with code ``coverage`` by typing the following command: ::
|
You should build your tests to provide the highest level of **code coverage**. You can run ``pytest`` with code ``coverage`` by typing the following command: ::
|
||||||
|
|
||||||
$ coverage run -m pytest
|
$ docker-compose -f local.yml run --rm django coverage run -m pytest
|
||||||
|
|
||||||
Once the tests are complete, run the following command to see the code coverage: ::
|
Once the tests are complete, run the following command to see the code coverage: ::
|
||||||
|
|
||||||
$ coverage report
|
$ docker-compose -f local.yml run --rm django coverage report
|
||||||
|
|
||||||
If you're running the project locally with Docker, use these commands instead: ::
|
|
||||||
|
|
||||||
$ docker compose -f docker-compose.local.yml run --rm django coverage run -m pytest
|
|
||||||
$ docker compose -f docker-compose.local.yml run --rm django coverage report
|
|
||||||
|
|
||||||
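For a sense of what these commands measure, here is a minimal test of the kind that lives in the generated ``users`` app — a sketch, where ``myproject`` stands in for your project slug and the default username-based setup is assumed: ::

    import pytest

    from myproject.users.models import User

    pytestmark = pytest.mark.django_db  # give every test in this module database access


    def test_user_str():
        user = User.objects.create_user(username="alice", password="s3cret-pass")
        assert str(user) == "alice"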
.. note::
|
.. note::
|
||||||
|
|
||||||
At the root of the project folder, you will find the `pytest.ini` file. You can use this to customize_ ``pytest`` to your liking.
|
At the root of the project folder, you will find the `pytest.ini` file. You can use this to customize_ ``pytest`` to your liking.
|
||||||
|
|
||||||
The configuration for ``coverage`` can be found in ``pyproject.toml``. You can find out more about `configuring`_ ``coverage``.
|
There is also the `.coveragerc`. This is the configuration file for the ``coverage`` tool. You can find out more about `configuring`_ ``coverage``.
|
||||||
|
|
||||||
.. seealso::
|
.. seealso::
|
||||||
|
|
||||||
|
@ -58,4 +53,4 @@ If you're running the project locally with Docker, use these commands instead: :
|
||||||
.. _develop locally with docker: ./developing-locally-docker.html
|
.. _develop locally with docker: ./developing-locally-docker.html
|
||||||
.. _customize: https://docs.pytest.org/en/latest/customize.html
|
.. _customize: https://docs.pytest.org/en/latest/customize.html
|
||||||
.. _unittest: https://docs.python.org/3/library/unittest.html#module-unittest
|
.. _unittest: https://docs.python.org/3/library/unittest.html#module-unittest
|
||||||
.. _configuring: https://coverage.readthedocs.io/en/latest/config.html
|
.. _configuring: https://coverage.readthedocs.io/en/v4.5.x/config.html
|
|
@ -1,5 +1,5 @@
|
||||||
Troubleshooting
|
Troubleshooting
|
||||||
===============
|
=====================================
|
||||||
|
|
||||||
This page contains some advice about errors and problems commonly encountered during the development of Cookiecutter Django applications.
|
This page contains some advice about errors and problems commonly encountered during the development of Cookiecutter Django applications.
|
||||||
|
|
||||||
|
@ -8,7 +8,7 @@ Server Error on sign-up/log-in
|
||||||
|
|
||||||
Make sure you have configured the mail backend (e.g. Mailgun) by adding the API key and sender domain
|
Make sure you have configured the mail backend (e.g. Mailgun) by adding the API key and sender domain
|
||||||
|
|
||||||
.. include:: ../includes/mailgun.rst
|
.. include:: mailgun.rst
|
||||||
|
|
||||||
.. _docker-postgres-auth-failed:
|
.. _docker-postgres-auth-failed:
|
||||||
|
|
||||||
|
@ -24,13 +24,13 @@ Examples of logs::
|
||||||
If you recreate the project multiple times with the same name, Docker will preserve the volumes for the postgres container between projects. Here is what happens:
|
If you recreate the project multiple times with the same name, Docker will preserve the volumes for the postgres container between projects. Here is what happens:
|
||||||
|
|
||||||
#. You generate the project the first time. The .env postgres file is populated with a random password
|
#. You generate the project the first time. The .env postgres file is populated with a random password
|
||||||
#. You run the docker compose and the containers are created. The postgres container creates the database based on the .env file credentials
|
#. You run the docker-compose and the containers are created. The postgres container creates the database based on the .env file credentials
|
||||||
#. You "regenerate" the project with the same name, so the postgres .env file is populated with a new random password
|
#. You "regenerate" the project with the same name, so the postgres .env file is populated with a new random password
|
||||||
#. You run docker compose. Since the names of the containers are the same, Docker will try to start them (not create them from scratch, i.e. it won't execute the Dockerfile to recreate the database). When this happens, it tries to start the database based on the new credentials, which do not match the ones that the database was created with, and you get the error message above.
|
#. You run docker-compose. Since the names of the containers are the same, docker will try to start them (not create them from scratch i.e. it won't execute the Dockerfile to recreate the database). When this happens, it tries to start the database based on the new credentials which do not match the ones that the database was created with, and you get the error message above.
|
||||||
|
|
||||||
To fix this, you can either:
|
To fix this, you can either:
|
||||||
|
|
||||||
- Clear your project-related Docker cache with ``docker compose -f docker-compose.local.yml down --volumes --rmi all``.
|
- Clear your project-related Docker cache with ``docker-compose -f local.yml down --volumes --rmi all``.
|
||||||
- Use the Docker volume sub-commands to find volumes (`ls`_) and remove them (`rm`_).
|
- Use the Docker volume sub-commands to find volumes (`ls`_) and remove them (`rm`_).
|
||||||
- Use the `prune`_ command to clear system-wide (use with care!).
|
- Use the `prune`_ command to clear system-wide (use with care!).
|
||||||
|
|
||||||
|
@ -38,16 +38,6 @@ To fix this, you can either:
|
||||||
.. _rm: https://docs.docker.com/engine/reference/commandline/volume_rm/
|
.. _rm: https://docs.docker.com/engine/reference/commandline/volume_rm/
|
||||||
.. _prune: https://docs.docker.com/v17.09/engine/reference/commandline/system_prune/
|
.. _prune: https://docs.docker.com/v17.09/engine/reference/commandline/system_prune/
|
||||||
|
|
||||||
Variable is not set. Defaulting to a blank string
|
|
||||||
-------------------------------------------------
|
|
||||||
|
|
||||||
Example::
|
|
||||||
|
|
||||||
WARN[0000] The "DJANGO_AWS_STORAGE_BUCKET_NAME" variable is not set. Defaulting to a blank string.
|
|
||||||
WARN[0000] The "DJANGO_AWS_S3_CUSTOM_DOMAIN" variable is not set. Defaulting to a blank string.
|
|
||||||
|
|
||||||
You have probably opted for Docker + Webpack without Whitenoise. This is a known limitation of the combination, which requires a little manual intervention. See the :ref:`dedicated section about it <webpack-whitenoise-limitation>`.
|
|
||||||
|
|
||||||
Others
|
Others
|
||||||
------
|
------
|
||||||
|
|
|
@ -1,8 +1,19 @@
|
||||||
import json
|
"""
|
||||||
|
NOTE:
|
||||||
|
the below code is to be maintained Python 2.x-compatible
|
||||||
|
as the whole Cookiecutter Django project initialization
|
||||||
|
can potentially be run in Python 2.x environment
|
||||||
|
(at least so we presume in `pre_gen_project.py`).
|
||||||
|
|
||||||
|
TODO: restrict Cookiecutter Django project initialization to
|
||||||
|
Python 3.x environments only
|
||||||
|
"""
|
||||||
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import os
|
||||||
import random
|
import random
|
||||||
import shutil
|
import shutil
|
||||||
import string
|
import string
|
||||||
from pathlib import Path
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
# Inspired by
|
# Inspired by
|
||||||
|
@ -24,51 +35,35 @@ DEBUG_VALUE = "debug"
|
||||||
def remove_open_source_files():
|
def remove_open_source_files():
|
||||||
file_names = ["CONTRIBUTORS.txt", "LICENSE"]
|
file_names = ["CONTRIBUTORS.txt", "LICENSE"]
|
||||||
for file_name in file_names:
|
for file_name in file_names:
|
||||||
Path(file_name).unlink()
|
os.remove(file_name)
|
||||||
|
|
||||||
|
|
||||||
def remove_gplv3_files():
|
def remove_gplv3_files():
|
||||||
file_names = ["COPYING"]
|
file_names = ["COPYING"]
|
||||||
for file_name in file_names:
|
for file_name in file_names:
|
||||||
Path(file_name).unlink()
|
os.remove(file_name)
|
||||||
|
|
||||||
|
|
||||||
def remove_custom_user_manager_files():
|
|
||||||
users_path = Path("{{cookiecutter.project_slug}}", "users")
|
|
||||||
(users_path / "managers.py").unlink()
|
|
||||||
(users_path / "tests" / "test_managers.py").unlink()
|
|
||||||
|
|
||||||
|
|
||||||
def remove_pycharm_files():
|
def remove_pycharm_files():
|
||||||
idea_dir_path = Path(".idea")
|
idea_dir_path = ".idea"
|
||||||
if idea_dir_path.exists():
|
if os.path.exists(idea_dir_path):
|
||||||
shutil.rmtree(idea_dir_path)
|
shutil.rmtree(idea_dir_path)
|
||||||
|
|
||||||
docs_dir_path = Path("docs", "pycharm")
|
docs_dir_path = os.path.join("docs", "pycharm")
|
||||||
if docs_dir_path.exists():
|
if os.path.exists(docs_dir_path):
|
||||||
shutil.rmtree(docs_dir_path)
|
shutil.rmtree(docs_dir_path)
|
||||||
|
|
||||||
|
|
||||||
def remove_docker_files():
|
def remove_docker_files():
|
||||||
shutil.rmtree(".devcontainer")
|
|
||||||
shutil.rmtree("compose")
|
shutil.rmtree("compose")
|
||||||
|
|
||||||
file_names = [
|
file_names = ["local.yml", "production.yml", ".dockerignore"]
|
||||||
"docker-compose.local.yml",
|
|
||||||
"docker-compose.production.yml",
|
|
||||||
".dockerignore",
|
|
||||||
"justfile",
|
|
||||||
]
|
|
||||||
for file_name in file_names:
|
for file_name in file_names:
|
||||||
Path(file_name).unlink()
|
os.remove(file_name)
|
||||||
if "{{ cookiecutter.editor }}" == "PyCharm":
|
if "{{ cookiecutter.use_pycharm }}".lower() == "y":
|
||||||
file_names = ["docker_compose_up_django.xml", "docker_compose_up_docs.xml"]
|
file_names = ["docker_compose_up_django.xml", "docker_compose_up_docs.xml"]
|
||||||
for file_name in file_names:
|
for file_name in file_names:
|
||||||
Path(".idea", "runConfigurations", file_name).unlink()
|
os.remove(os.path.join(".idea", "runConfigurations", file_name))
|
||||||
|
|
||||||
|
|
||||||
def remove_nginx_docker_files():
|
|
||||||
shutil.rmtree(Path("compose", "production", "nginx"))
|
|
||||||
|
|
||||||
|
|
||||||
def remove_utility_files():
|
def remove_utility_files():
|
||||||
|
@ -76,170 +71,75 @@ def remove_utility_files():
|
||||||
|
|
||||||
|
|
||||||
def remove_heroku_files():
|
def remove_heroku_files():
|
||||||
file_names = ["Procfile", "requirements.txt"]
|
file_names = ["Procfile", "runtime.txt", "requirements.txt"]
|
||||||
for file_name in file_names:
|
for file_name in file_names:
|
||||||
if file_name == "requirements.txt" and "{{ cookiecutter.ci_tool }}".lower() == "travis":
|
if (
|
||||||
|
file_name == "requirements.txt"
|
||||||
|
and "{{ cookiecutter.ci_tool }}".lower() == "travis"
|
||||||
|
):
|
||||||
# don't remove the file if we are using travisci but not using heroku
|
# don't remove the file if we are using travisci but not using heroku
|
||||||
continue
|
continue
|
||||||
Path(file_name).unlink()
|
os.remove(file_name)
|
||||||
|
remove_heroku_build_hooks()
|
||||||
|
|
||||||
|
|
||||||
|
def remove_heroku_build_hooks():
|
||||||
shutil.rmtree("bin")
|
shutil.rmtree("bin")
|
||||||
|
|
||||||
|
|
||||||
def remove_sass_files():
|
|
||||||
shutil.rmtree(Path("{{cookiecutter.project_slug}}", "static", "sass"))
|
|
||||||
|
|
||||||
|
|
||||||
def remove_gulp_files():
|
def remove_gulp_files():
|
||||||
file_names = ["gulpfile.mjs"]
|
file_names = ["gulpfile.js"]
|
||||||
for file_name in file_names:
|
for file_name in file_names:
|
||||||
Path(file_name).unlink()
|
os.remove(file_name)
|
||||||
|
remove_sass_files()
|
||||||
|
|
||||||
|
|
||||||
def remove_webpack_files():
|
def remove_sass_files():
|
||||||
shutil.rmtree("webpack")
|
shutil.rmtree(os.path.join("{{cookiecutter.project_slug}}", "static", "sass"))
|
||||||
remove_vendors_js()
|
|
||||||
|
|
||||||
|
|
||||||
def remove_vendors_js():
|
|
||||||
vendors_js_path = Path("{{ cookiecutter.project_slug }}", "static", "js", "vendors.js")
|
|
||||||
if vendors_js_path.exists():
|
|
||||||
vendors_js_path.unlink()
|
|
||||||
|
|
||||||
|
|
||||||
def remove_packagejson_file():
|
def remove_packagejson_file():
|
||||||
file_names = ["package.json"]
|
file_names = ["package.json"]
|
||||||
for file_name in file_names:
|
for file_name in file_names:
|
||||||
Path(file_name).unlink()
|
os.remove(file_name)
|
||||||
|
|
||||||
|
|
||||||
def update_package_json(remove_dev_deps=None, remove_keys=None, scripts=None):
|
|
||||||
remove_dev_deps = remove_dev_deps or []
|
|
||||||
remove_keys = remove_keys or []
|
|
||||||
scripts = scripts or {}
|
|
||||||
package_json = Path("package.json")
|
|
||||||
content = json.loads(package_json.read_text())
|
|
||||||
for package_name in remove_dev_deps:
|
|
||||||
content["devDependencies"].pop(package_name)
|
|
||||||
for key in remove_keys:
|
|
||||||
content.pop(key)
|
|
||||||
content["scripts"].update(scripts)
|
|
||||||
updated_content = json.dumps(content, ensure_ascii=False, indent=2) + "\n"
|
|
||||||
package_json.write_text(updated_content)
|
|
||||||
|
|
||||||
|
|
||||||
def handle_js_runner(choice, use_docker, use_async):
|
|
||||||
if choice == "Gulp":
|
|
||||||
update_package_json(
|
|
||||||
remove_dev_deps=[
|
|
||||||
"@babel/core",
|
|
||||||
"@babel/preset-env",
|
|
||||||
"babel-loader",
|
|
||||||
"concurrently",
|
|
||||||
"css-loader",
|
|
||||||
"mini-css-extract-plugin",
|
|
||||||
"postcss-loader",
|
|
||||||
"postcss-preset-env",
|
|
||||||
"sass-loader",
|
|
||||||
"webpack",
|
|
||||||
"webpack-bundle-tracker",
|
|
||||||
"webpack-cli",
|
|
||||||
"webpack-dev-server",
|
|
||||||
"webpack-merge",
|
|
||||||
],
|
|
||||||
remove_keys=["babel"],
|
|
||||||
scripts={
|
|
||||||
"dev": "gulp",
|
|
||||||
"build": "gulp build",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
remove_webpack_files()
|
|
||||||
elif choice == "Webpack":
|
|
||||||
scripts = {
|
|
||||||
"dev": "webpack serve --config webpack/dev.config.js",
|
|
||||||
"build": "webpack --config webpack/prod.config.js",
|
|
||||||
}
|
|
||||||
remove_dev_deps = [
|
|
||||||
"browser-sync",
|
|
||||||
"cssnano",
|
|
||||||
"gulp",
|
|
||||||
"gulp-concat",
|
|
||||||
"gulp-imagemin",
|
|
||||||
"gulp-plumber",
|
|
||||||
"gulp-postcss",
|
|
||||||
"gulp-rename",
|
|
||||||
"gulp-sass",
|
|
||||||
"gulp-uglify-es",
|
|
||||||
]
|
|
||||||
if not use_docker:
|
|
||||||
dev_django_cmd = (
|
|
||||||
"uvicorn config.asgi:application --reload" if use_async else "python manage.py runserver_plus"
|
|
||||||
)
|
|
||||||
scripts.update(
|
|
||||||
{
|
|
||||||
"dev": "concurrently npm:dev:*",
|
|
||||||
"dev:webpack": "webpack serve --config webpack/dev.config.js",
|
|
||||||
"dev:django": dev_django_cmd,
|
|
||||||
}
|
|
||||||
)
|
|
||||||
else:
|
|
||||||
remove_dev_deps.append("concurrently")
|
|
||||||
update_package_json(remove_dev_deps=remove_dev_deps, scripts=scripts)
|
|
||||||
remove_gulp_files()
|
|
||||||
|
|
||||||
|
|
||||||
def remove_prettier_pre_commit():
|
|
||||||
pre_commit_yaml = Path(".pre-commit-config.yaml")
|
|
||||||
content = pre_commit_yaml.read_text().splitlines()
|
|
||||||
|
|
||||||
removing = False
|
|
||||||
new_lines = []
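# Drop the mirrors-prettier block: skip from its "- repo:" line until the next "- repo:" entry, which is kept.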
|
|
||||||
for line in content:
|
|
||||||
if removing and "- repo:" in line:
|
|
||||||
removing = False
|
|
||||||
if "mirrors-prettier" in line:
|
|
||||||
removing = True
|
|
||||||
if not removing:
|
|
||||||
new_lines.append(line)
|
|
||||||
|
|
||||||
pre_commit_yaml.write_text("\n".join(new_lines))
|
|
||||||
|
|
||||||
|
|
||||||
def remove_celery_files():
|
def remove_celery_files():
|
||||||
file_paths = [
|
file_names = [
|
||||||
Path("config", "celery_app.py"),
|
os.path.join("config", "celery_app.py"),
|
||||||
Path("{{ cookiecutter.project_slug }}", "users", "tasks.py"),
|
os.path.join("{{ cookiecutter.project_slug }}", "users", "tasks.py"),
|
||||||
Path("{{ cookiecutter.project_slug }}", "users", "tests", "test_tasks.py"),
|
os.path.join(
|
||||||
|
"{{ cookiecutter.project_slug }}", "users", "tests", "test_tasks.py"
|
||||||
|
),
|
||||||
]
|
]
|
||||||
for file_path in file_paths:
|
for file_name in file_names:
|
||||||
file_path.unlink()
|
os.remove(file_name)
|
||||||
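(The ``users/tasks.py`` removed here contains a demo task along these lines — a sketch, not necessarily the exact file:)

    from celery import shared_task

    from .models import User

    @shared_task
    def get_users_count():
        """A pointless Celery task to demonstrate usage."""
        return User.objects.count()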
|
|
||||||
|
|
||||||
def remove_async_files():
|
def remove_async_files():
|
||||||
file_paths = [
|
file_names = [
|
||||||
Path("config", "asgi.py"),
|
os.path.join("config", "asgi.py"),
|
||||||
Path("config", "websocket.py"),
|
os.path.join("config", "websocket.py"),
|
||||||
]
|
]
|
||||||
for file_path in file_paths:
|
for file_name in file_names:
|
||||||
file_path.unlink()
|
os.remove(file_name)
|
||||||
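(Similarly, ``config/websocket.py`` holds a minimal raw-ASGI websocket callable, roughly as follows — an illustration, not the exact file:)

    async def websocket_application(scope, receive, send):
        while True:
            event = await receive()
            if event["type"] == "websocket.connect":
                await send({"type": "websocket.accept"})
            elif event["type"] == "websocket.disconnect":
                break
            elif event["type"] == "websocket.receive" and event.get("text") == "ping":
                await send({"type": "websocket.send", "text": "pong!"})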
|
|
||||||
|
|
||||||
def remove_dottravisyml_file():
|
def remove_dottravisyml_file():
|
||||||
Path(".travis.yml").unlink()
|
os.remove(".travis.yml")
|
||||||
|
|
||||||
|
|
||||||
def remove_dotgitlabciyml_file():
|
def remove_dotgitlabciyml_file():
|
||||||
Path(".gitlab-ci.yml").unlink()
|
os.remove(".gitlab-ci.yml")
|
||||||
|
|
||||||
|
|
||||||
def remove_dotgithub_folder():
|
def remove_dotgithub_folder():
|
||||||
shutil.rmtree(".github")
|
shutil.rmtree(".github")
|
||||||
|
|
||||||
|
|
||||||
def remove_dotdrone_file():
|
def generate_random_string(
|
||||||
Path(".drone.yml").unlink()
|
length, using_digits=False, using_ascii_letters=False, using_punctuation=False
|
||||||
|
):
|
||||||
|
|
||||||
def generate_random_string(length, using_digits=False, using_ascii_letters=False, using_punctuation=False):
|
|
||||||
"""
|
"""
|
||||||
Example:
|
Example:
|
||||||
opting out for 50 symbol-long, [a-z][A-Z][0-9] string
|
opting out for 50 symbol-long, [a-z][A-Z][0-9] string
|
||||||
|
@ -262,7 +162,7 @@ def generate_random_string(length, using_digits=False, using_ascii_letters=False
|
||||||
return "".join([random.choice(symbols) for _ in range(length)])
|
return "".join([random.choice(symbols) for _ in range(length)])
|
||||||
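(For context, the body elided here builds the symbol pool from the requested character classes — roughly this sketch, not the file's exact code:)

    import random
    import string

    def generate_random_string(length, using_digits=False, using_ascii_letters=False, using_punctuation=False):
        symbols = ""
        if using_digits:
            symbols += string.digits
        if using_ascii_letters:
            symbols += string.ascii_letters
        if using_punctuation:
            symbols += string.punctuation  # the real hook additionally escapes quoting-sensitive characters
        if not symbols:
            return None  # matches the "random_string is None" guard in set_flag()
        return "".join([random.choice(symbols) for _ in range(length)])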
|
|
||||||
|
|
||||||
def set_flag(file_path: Path, flag, value=None, formatted=None, *args, **kwargs):
|
def set_flag(file_path, flag, value=None, formatted=None, *args, **kwargs):
|
||||||
if value is None:
|
if value is None:
|
||||||
random_string = generate_random_string(*args, **kwargs)
|
random_string = generate_random_string(*args, **kwargs)
|
||||||
if random_string is None:
|
if random_string is None:
|
||||||
|
@ -275,7 +175,7 @@ def set_flag(file_path: Path, flag, value=None, formatted=None, *args, **kwargs)
|
||||||
random_string = formatted.format(random_string)
|
random_string = formatted.format(random_string)
|
||||||
value = random_string
|
value = random_string
|
||||||
|
|
||||||
with file_path.open("r+") as f:
|
with open(file_path, "r+") as f:
|
||||||
file_contents = f.read().replace(flag, value)
|
file_contents = f.read().replace(flag, value)
|
||||||
f.seek(0)
|
f.seek(0)
|
||||||
f.write(file_contents)
|
f.write(file_contents)
|
||||||
|
@ -284,7 +184,7 @@ def set_flag(file_path: Path, flag, value=None, formatted=None, *args, **kwargs)
|
||||||
return value
|
return value
|
||||||
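(Illustrative call — hypothetical values: an env file holding the placeholder ``DJANGO_SECRET_KEY=!!!SET DJANGO_SECRET_KEY!!!`` gets rewritten in place with a generated 64-character value:)

    secret_key = set_flag(
        Path(".envs", ".production", ".django"),
        "!!!SET DJANGO_SECRET_KEY!!!",
        length=64,
        using_digits=True,
        using_ascii_letters=True,
    )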
|
|
||||||
|
|
||||||
def set_django_secret_key(file_path: Path):
|
def set_django_secret_key(file_path):
|
||||||
django_secret_key = set_flag(
|
django_secret_key = set_flag(
|
||||||
file_path,
|
file_path,
|
||||||
"!!!SET DJANGO_SECRET_KEY!!!",
|
"!!!SET DJANGO_SECRET_KEY!!!",
|
||||||
|
@ -295,7 +195,7 @@ def set_django_secret_key(file_path: Path):
|
||||||
return django_secret_key
|
return django_secret_key
|
||||||
|
|
||||||
|
|
||||||
def set_django_admin_url(file_path: Path):
|
def set_django_admin_url(file_path):
|
||||||
django_admin_url = set_flag(
|
django_admin_url = set_flag(
|
||||||
file_path,
|
file_path,
|
||||||
"!!!SET DJANGO_ADMIN_URL!!!",
|
"!!!SET DJANGO_ADMIN_URL!!!",
|
||||||
|
@ -333,7 +233,9 @@ def set_postgres_password(file_path, value=None):
|
||||||
|
|
||||||
|
|
||||||
def set_celery_flower_user(file_path, value):
|
def set_celery_flower_user(file_path, value):
|
||||||
celery_flower_user = set_flag(file_path, "!!!SET CELERY_FLOWER_USER!!!", value=value)
|
celery_flower_user = set_flag(
|
||||||
|
file_path, "!!!SET CELERY_FLOWER_USER!!!", value=value
|
||||||
|
)
|
||||||
return celery_flower_user
|
return celery_flower_user
|
||||||
|
|
||||||
|
|
||||||
|
@ -350,59 +252,84 @@ def set_celery_flower_password(file_path, value=None):
|
||||||
|
|
||||||
|
|
||||||
def append_to_gitignore_file(ignored_line):
|
def append_to_gitignore_file(ignored_line):
|
||||||
with Path(".gitignore").open("a") as gitignore_file:
|
with open(".gitignore", "a") as gitignore_file:
|
||||||
gitignore_file.write(ignored_line)
|
gitignore_file.write(ignored_line)
|
||||||
gitignore_file.write("\n")
|
gitignore_file.write("\n")
|
||||||
|
|
||||||
|
|
||||||
def set_flags_in_envs(postgres_user, celery_flower_user, debug=False):
|
def set_flags_in_envs(postgres_user, celery_flower_user, debug=False):
|
||||||
local_django_envs_path = Path(".envs", ".local", ".django")
|
local_django_envs_path = os.path.join(".envs", ".local", ".django")
|
||||||
production_django_envs_path = Path(".envs", ".production", ".django")
|
production_django_envs_path = os.path.join(".envs", ".production", ".django")
|
||||||
local_postgres_envs_path = Path(".envs", ".local", ".postgres")
|
local_postgres_envs_path = os.path.join(".envs", ".local", ".postgres")
|
||||||
production_postgres_envs_path = Path(".envs", ".production", ".postgres")
|
production_postgres_envs_path = os.path.join(".envs", ".production", ".postgres")
|
||||||
|
|
||||||
set_django_secret_key(production_django_envs_path)
|
set_django_secret_key(production_django_envs_path)
|
||||||
set_django_admin_url(production_django_envs_path)
|
set_django_admin_url(production_django_envs_path)
|
||||||
|
|
||||||
set_postgres_user(local_postgres_envs_path, value=postgres_user)
|
set_postgres_user(local_postgres_envs_path, value=postgres_user)
|
||||||
set_postgres_password(local_postgres_envs_path, value=DEBUG_VALUE if debug else None)
|
set_postgres_password(
|
||||||
|
local_postgres_envs_path, value=DEBUG_VALUE if debug else None
|
||||||
|
)
|
||||||
set_postgres_user(production_postgres_envs_path, value=postgres_user)
|
set_postgres_user(production_postgres_envs_path, value=postgres_user)
|
||||||
set_postgres_password(production_postgres_envs_path, value=DEBUG_VALUE if debug else None)
|
set_postgres_password(
|
||||||
|
production_postgres_envs_path, value=DEBUG_VALUE if debug else None
|
||||||
|
)
|
||||||
|
|
||||||
set_celery_flower_user(local_django_envs_path, value=celery_flower_user)
|
set_celery_flower_user(local_django_envs_path, value=celery_flower_user)
|
||||||
set_celery_flower_password(local_django_envs_path, value=DEBUG_VALUE if debug else None)
|
set_celery_flower_password(
|
||||||
|
local_django_envs_path, value=DEBUG_VALUE if debug else None
|
||||||
|
)
|
||||||
set_celery_flower_user(production_django_envs_path, value=celery_flower_user)
|
set_celery_flower_user(production_django_envs_path, value=celery_flower_user)
|
||||||
set_celery_flower_password(production_django_envs_path, value=DEBUG_VALUE if debug else None)
|
set_celery_flower_password(
|
||||||
|
production_django_envs_path, value=DEBUG_VALUE if debug else None
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def set_flags_in_settings_files():
|
def set_flags_in_settings_files():
|
||||||
set_django_secret_key(Path("config", "settings", "local.py"))
|
set_django_secret_key(os.path.join("config", "settings", "local.py"))
|
||||||
set_django_secret_key(Path("config", "settings", "test.py"))
|
set_django_secret_key(os.path.join("config", "settings", "test.py"))
|
||||||
|
|
||||||
|
|
||||||
def remove_envs_and_associated_files():
|
def remove_envs_and_associated_files():
|
||||||
shutil.rmtree(".envs")
|
shutil.rmtree(".envs")
|
||||||
Path("merge_production_dotenvs_in_dotenv.py").unlink()
|
os.remove("merge_production_dotenvs_in_dotenv.py")
|
||||||
shutil.rmtree("tests")
|
|
||||||
|
|
||||||
|
|
||||||
def remove_celery_compose_dirs():
|
def remove_celery_compose_dirs():
|
||||||
shutil.rmtree(Path("compose", "local", "django", "celery"))
|
shutil.rmtree(os.path.join("compose", "local", "django", "celery"))
|
||||||
shutil.rmtree(Path("compose", "production", "django", "celery"))
|
shutil.rmtree(os.path.join("compose", "production", "django", "celery"))
|
||||||
|
|
||||||
|
|
||||||
def remove_node_dockerfile():
|
def remove_node_dockerfile():
|
||||||
shutil.rmtree(Path("compose", "local", "node"))
|
shutil.rmtree(os.path.join("compose", "local", "node"))
|
||||||
|
|
||||||
|
|
||||||
def remove_aws_dockerfile():
|
def remove_aws_dockerfile():
|
||||||
shutil.rmtree(Path("compose", "production", "aws"))
|
shutil.rmtree(os.path.join("compose", "production", "aws"))
|
||||||
|
|
||||||
|
|
||||||
def remove_drf_starter_files():
|
def remove_drf_starter_files():
|
||||||
Path("config", "api_router.py").unlink()
|
os.remove(os.path.join("config", "api_router.py"))
|
||||||
shutil.rmtree(Path("{{cookiecutter.project_slug}}", "users", "api"))
|
shutil.rmtree(os.path.join("{{cookiecutter.project_slug}}", "users", "api"))
|
||||||
shutil.rmtree(Path("{{cookiecutter.project_slug}}", "users", "tests", "api"))
|
os.remove(
|
||||||
|
os.path.join(
|
||||||
|
"{{cookiecutter.project_slug}}", "users", "tests", "test_drf_urls.py"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
os.remove(
|
||||||
|
os.path.join(
|
||||||
|
"{{cookiecutter.project_slug}}", "users", "tests", "test_drf_views.py"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
os.remove(
|
||||||
|
os.path.join(
|
||||||
|
"{{cookiecutter.project_slug}}", "users", "tests", "test_swagger.py"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
|
def remove_storages_module():
|
||||||
|
os.remove(os.path.join("{{cookiecutter.project_slug}}", "utils", "storages.py"))
|
||||||
|
|
||||||
|
|
||||||
def main():
|
def main():
|
||||||
|
@ -420,31 +347,34 @@ def main():
|
||||||
if "{{ cookiecutter.open_source_license}}" != "GPLv3":
|
if "{{ cookiecutter.open_source_license}}" != "GPLv3":
|
||||||
remove_gplv3_files()
|
remove_gplv3_files()
|
||||||
|
|
||||||
if "{{ cookiecutter.username_type }}" == "username":
|
if "{{ cookiecutter.use_pycharm }}".lower() == "n":
|
||||||
remove_custom_user_manager_files()
|
|
||||||
|
|
||||||
if "{{ cookiecutter.editor }}" != "PyCharm":
|
|
||||||
remove_pycharm_files()
|
remove_pycharm_files()
|
||||||
|
|
||||||
if "{{ cookiecutter.use_docker }}".lower() == "y":
|
if "{{ cookiecutter.use_docker }}".lower() == "y":
|
||||||
remove_utility_files()
|
remove_utility_files()
|
||||||
if "{{ cookiecutter.cloud_provider }}".lower() != "none":
|
|
||||||
remove_nginx_docker_files()
|
|
||||||
else:
|
else:
|
||||||
remove_docker_files()
|
remove_docker_files()
|
||||||
|
|
||||||
if "{{ cookiecutter.use_docker }}".lower() == "y" and "{{ cookiecutter.cloud_provider}}" != "AWS":
|
if (
|
||||||
|
"{{ cookiecutter.use_docker }}".lower() == "y"
|
||||||
|
and "{{ cookiecutter.cloud_provider}}" != "AWS"
|
||||||
|
):
|
||||||
remove_aws_dockerfile()
|
remove_aws_dockerfile()
|
||||||
|
|
||||||
if "{{ cookiecutter.use_heroku }}".lower() == "n":
|
if "{{ cookiecutter.use_heroku }}".lower() == "n":
|
||||||
remove_heroku_files()
|
remove_heroku_files()
|
||||||
|
elif "{{ cookiecutter.frontend_pipeline }}" != "Django Compressor":
|
||||||
|
remove_heroku_build_hooks()
|
||||||
|
|
||||||
if "{{ cookiecutter.use_docker }}".lower() == "n" and "{{ cookiecutter.use_heroku }}".lower() == "n":
|
if (
|
||||||
|
"{{ cookiecutter.use_docker }}".lower() == "n"
|
||||||
|
and "{{ cookiecutter.use_heroku }}".lower() == "n"
|
||||||
|
):
|
||||||
if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y":
|
if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y":
|
||||||
print(
|
print(
|
||||||
INFO + ".env(s) are only utilized when Docker Compose and/or "
|
INFO + ".env(s) are only utilized when Docker Compose and/or "
|
||||||
"Heroku support is enabled so keeping them does not make sense "
|
"Heroku support is enabled so keeping them does not "
|
||||||
"given your current setup." + TERMINATOR
|
"make sense given your current setup." + TERMINATOR
|
||||||
)
|
)
|
||||||
remove_envs_and_associated_files()
|
remove_envs_and_associated_files()
|
||||||
else:
|
else:
|
||||||
|
@ -453,26 +383,18 @@ def main():
|
||||||
if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y":
|
if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y":
|
||||||
append_to_gitignore_file("!.envs/.local/")
|
append_to_gitignore_file("!.envs/.local/")
|
||||||
|
|
||||||
if "{{ cookiecutter.frontend_pipeline }}" in ["None", "Django Compressor"]:
|
if "{{ cookiecutter.frontend_pipeline }}" != "Gulp":
|
||||||
remove_gulp_files()
|
remove_gulp_files()
|
||||||
remove_webpack_files()
|
|
||||||
remove_sass_files()
|
|
||||||
remove_packagejson_file()
|
remove_packagejson_file()
|
||||||
remove_prettier_pre_commit()
|
|
||||||
if "{{ cookiecutter.use_docker }}".lower() == "y":
|
if "{{ cookiecutter.use_docker }}".lower() == "y":
|
||||||
remove_node_dockerfile()
|
remove_node_dockerfile()
|
||||||
else:
|
|
||||||
handle_js_runner(
|
|
||||||
"{{ cookiecutter.frontend_pipeline }}",
|
|
||||||
use_docker=("{{ cookiecutter.use_docker }}".lower() == "y"),
|
|
||||||
use_async=("{{ cookiecutter.use_async }}".lower() == "y"),
|
|
||||||
)
|
|
||||||
|
|
||||||
if "{{ cookiecutter.cloud_provider }}" == "None" and "{{ cookiecutter.use_docker }}".lower() == "n":
|
if "{{ cookiecutter.cloud_provider}}" == "None":
|
||||||
print(
|
print(
|
||||||
WARNING + "You chose to not use any cloud providers nor Docker, "
|
WARNING + "You chose not to use a cloud provider, "
|
||||||
"media files won't be served in production." + TERMINATOR
|
"media files won't be served in production." + TERMINATOR
|
||||||
)
|
)
|
||||||
|
remove_storages_module()
|
||||||
|
|
||||||
if "{{ cookiecutter.use_celery }}".lower() == "n":
|
if "{{ cookiecutter.use_celery }}".lower() == "n":
|
||||||
remove_celery_files()
|
remove_celery_files()
|
||||||
|
@ -488,9 +410,6 @@ def main():
|
||||||
if "{{ cookiecutter.ci_tool }}" != "Github":
|
if "{{ cookiecutter.ci_tool }}" != "Github":
|
||||||
remove_dotgithub_folder()
|
remove_dotgithub_folder()
|
||||||
|
|
||||||
if "{{ cookiecutter.ci_tool }}" != "Drone":
|
|
||||||
remove_dotdrone_file()
|
|
||||||
|
|
||||||
if "{{ cookiecutter.use_drf }}".lower() == "n":
|
if "{{ cookiecutter.use_drf }}".lower() == "n":
|
||||||
remove_drf_starter_files()
|
remove_drf_starter_files()
|
||||||
|
|
||||||
|
|
|
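The `INFO` and `WARNING` strings concatenated into the `print()` calls above are raw ANSI escape sequences, with `TERMINATOR` resetting the terminal colour. A minimal standalone sketch of the pattern (constants copied from the hook files; the success message is illustrative):

```python
# Sketch of the hooks' coloured-output pattern; relies on Python's implicit
# concatenation of adjacent string literals inside the print() call.
TERMINATOR = "\x1b[0m"  # resets colour/attributes
INFO = "\x1b[1;33m [INFO]: "  # bold yellow prefix
SUCCESS = "\x1b[1;32m [SUCCESS]: "  # bold green prefix

print(
    INFO + ".env(s) are only utilized when Docker Compose and/or "
    "Heroku support is enabled so keeping them does not make sense "
    "given your current setup." + TERMINATOR
)
print(SUCCESS + "Done (illustrative message)." + TERMINATOR)
```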
--- a/hooks/pre_gen_project.py
+++ b/hooks/pre_gen_project.py
@@ -1,3 +1,14 @@
+"""
+NOTE:
+    the below code is to be maintained Python 2.x-compatible
+    as the whole Cookiecutter Django project initialization
+    can potentially be run in Python 2.x environment.
+
+TODO: restrict Cookiecutter Django project initialization
+    to Python 3.x environments only
+"""
+from __future__ import print_function
+
 import sys

 TERMINATOR = "\x1b[0m"
@@ -6,26 +17,69 @@ INFO = "\x1b[1;33m [INFO]: "
 HINT = "\x1b[3;33m"
 SUCCESS = "\x1b[1;32m [SUCCESS]: "

-# The content of this string is evaluated by Jinja, and plays an important role.
-# It updates the cookiecutter context to trim leading and trailing spaces
-# from domain/email values
-"""
-{{ cookiecutter.update({ "domain_name": cookiecutter.domain_name | trim }) }}
-{{ cookiecutter.update({ "email": cookiecutter.email | trim }) }}
-"""
-
 project_slug = "{{ cookiecutter.project_slug }}"
 if hasattr(project_slug, "isidentifier"):
-    assert project_slug.isidentifier(), "'{}' project slug is not a valid Python identifier.".format(project_slug)
+    assert (
+        project_slug.isidentifier()
+    ), "'{}' project slug is not a valid Python identifier.".format(project_slug)

-assert project_slug == project_slug.lower(), "'{}' project slug should be all lowercase".format(project_slug)
+assert (
+    project_slug == project_slug.lower()
+), "'{}' project slug should be all lowercase".format(project_slug)

-assert "\\" not in "{{ cookiecutter.author_name }}", "Don't include backslashes in author name."
+assert (
+    "\\" not in "{{ cookiecutter.author_name }}"
+), "Don't include backslashes in author name."

-if "{{ cookiecutter.use_whitenoise }}".lower() == "n" and "{{ cookiecutter.cloud_provider }}" == "None":
-    print("You should either use Whitenoise or select a Cloud Provider to serve static files")
+if "{{ cookiecutter.use_docker }}".lower() == "n":
+    python_major_version = sys.version_info[0]
+    if python_major_version == 2:
+        print(
+            WARNING + "You're running cookiecutter under Python 2, but the generated "
+            "project requires Python 3.9+. Do you want to proceed (y/n)? " + TERMINATOR
+        )
+        yes_options, no_options = frozenset(["y"]), frozenset(["n"])
+        while True:
+            choice = raw_input().lower()  # noqa: F821
+            if choice in yes_options:
+                break
+
+            elif choice in no_options:
+                print(INFO + "Generation process stopped as requested." + TERMINATOR)
+                sys.exit(1)
+            else:
+                print(
+                    HINT
+                    + "Please respond with {} or {}: ".format(
+                        ", ".join(
+                            ["'{}'".format(o) for o in yes_options if not o == ""]
+                        ),
+                        ", ".join(
+                            ["'{}'".format(o) for o in no_options if not o == ""]
+                        ),
+                    )
+                    + TERMINATOR
+                )
+
+if (
+    "{{ cookiecutter.use_whitenoise }}".lower() == "n"
+    and "{{ cookiecutter.cloud_provider }}" == "None"
+):
+    print(
+        "You should either use Whitenoise or select a "
+        "Cloud Provider to serve static files"
+    )
     sys.exit(1)

-if "{{ cookiecutter.mail_service }}" == "Amazon SES" and "{{ cookiecutter.cloud_provider }}" != "AWS":
-    print("You should either use AWS or select a different Mail Service for sending emails.")
+if (
+    "{{ cookiecutter.cloud_provider }}" == "GCP"
+    and "{{ cookiecutter.mail_service }}" == "Amazon SES"
+) or (
+    "{{ cookiecutter.cloud_provider }}" == "None"
+    and "{{ cookiecutter.mail_service }}" == "Amazon SES"
+):
+    print(
+        "You should either use AWS or select a different "
+        "Mail Service for sending emails."
+    )
     sys.exit(1)
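The master-side docstring removed above (`{{ cookiecutter.update({ ... | trim }) }}`) works because cookiecutter renders hook files through Jinja before executing them, so the render mutates the context as a side effect even though the rendered string is discarded. A hedged, self-contained sketch, with a plain `dict` standing in for the real cookiecutter context:

```python
# Sketch (assumes jinja2 is installed): rendering the hook source trims the
# domain/email values in the context before any hook code runs. The rendered
# output ("None" from each dict.update call) is thrown away; only the side
# effect on the dict matters.
from jinja2 import Template

ctx = {"domain_name": " example.com ", "email": " admin@example.com "}
hook_docstring = Template(
    "{{ cookiecutter.update({'domain_name': cookiecutter.domain_name | trim}) }}"
    "{{ cookiecutter.update({'email': cookiecutter.email | trim}) }}"
)
hook_docstring.render(cookiecutter=ctx)

print(ctx)  # {'domain_name': 'example.com', 'email': 'admin@example.com'}
```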
--- a/pyproject.toml
+++ /dev/null
@@ -1,114 +0,0 @@
-[project]
-name = "cookiecutter-django"
-version = "2025.07.25"
-description = "A Cookiecutter template for creating production-ready Django projects quickly."
-readme = "README.md"
-keywords = [
-  "cookiecutter",
-  "django",
-  "project template",
-  "scaffolding",
-  "skeleton",
-]
-license = { text = "BSD" }
-authors = [
-  { name = "Daniel Roy Greenfeld", email = "pydanny@gmail.com" },
-]
-requires-python = ">=3.12,<3.13"
-classifiers = [
-  "Development Status :: 4 - Beta",
-  "Environment :: Console",
-  "Framework :: Django :: 5.0",
-  "Intended Audience :: Developers",
-  "License :: OSI Approved :: BSD License",
-  "Natural Language :: English",
-  "Programming Language :: Python",
-  "Programming Language :: Python :: 3 :: Only",
-  "Programming Language :: Python :: 3.12",
-  "Programming Language :: Python :: Implementation :: CPython",
-  "Topic :: Software Development",
-]
-dependencies = [
-  "binaryornot==0.4.4",
-  "cookiecutter==2.6",
-  "django-upgrade==1.22.2",
-  "djlint==1.36.4",
-  "gitpython==3.1.43",
-  "jinja2==3.1.5",
-  "pre-commit==4.1.0",
-  "pygithub==2.5",
-  "pytest==8.3.4",
-  "pytest-cookies==0.7",
-  "pytest-instafail==0.5",
-  "pytest-xdist==3.6.1",
-  "pyyaml==6.0.2",
-  "requests==2.32.3",
-  "ruff==0.12.5",
-  "sh==2.1; sys_platform!='win23'",
-  "tox==4.23.2",
-  "tox-uv>=1.17",
-]
-urls = { Repository = "https://github.com/cookiecutter/cookiecutter-django" }
-
-[dependency-groups]
-docs = [
-  "myst-parser>=4",
-  "sphinx>=8.0.2",
-  "sphinx-autobuild>=2024.10.3",
-  "sphinx-rtd-theme>=3",
-]
-
-[tool.black]
-line-length = 119
-target-version = [
-  'py312',
-]
-
-# ==== isort ====
-
-[tool.isort]
-profile = "black"
-line_length = 119
-known_first_party = [
-  "tests",
-  "scripts",
-  "hooks",
-]
-
-[tool.pyproject-fmt]
-keep_full_version = true
-
-# ==== pytest ====
-
-[tool.pytest.ini_options]
-addopts = "-v --tb=short"
-norecursedirs = [
-  ".tox",
-  ".git",
-  "*/migrations/*",
-  "*/static/*",
-  "docs",
-  "venv",
-  "*/{{cookiecutter.project_slug}}/*",
-]
-
-# ==== djLint ====
-
-[tool.djlint]
-blank_line_after_tag = "load,extends"
-close_void_tags = true
-format_css = true
-format_js = true
-# TODO: remove T002 when fixed https://github.com/Riverside-Healthcare/djLint/issues/687
-ignore = "H006,H030,H031,T002,T028"
-ignore_blocks = "raw"
-include = "H017,H035"
-indent = 2
-max_line_length = 119
-profile = "jinja"
-
-[tool.djlint.css]
-indent_size = 2
-
-[tool.djlint.js]
-indent_size = 2
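The `; sys_platform!='win23'` suffix on the `sh` pin is a PEP 508 environment marker. A sketch of how it evaluates, using the `packaging` library (an assumption; it is not among the dependencies listed above). Note that no platform reports `sys_platform == 'win23'`, so as written the marker is true everywhere; `'win32'` was presumably intended, to skip Windows:

```python
# Sketch (assumes the `packaging` library is installed) showing how a PEP 508
# environment marker on a requirement string is parsed and evaluated.
from packaging.requirements import Requirement

req = Requirement("sh==2.1; sys_platform!='win23'")
print(req.name, req.specifier)  # sh ==2.1
print(req.marker.evaluate())    # True on every platform, Windows included
```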

--- /dev/null
+++ b/pytest.ini
@@ -0,0 +1,3 @@
+[pytest]
+addopts = -v --tb=short
+norecursedirs = .tox .git */migrations/* */static/* docs venv */{{cookiecutter.project_slug}}/*

--- /dev/null
+++ b/requirements.txt
@@ -0,0 +1,26 @@
+cookiecutter==1.7.3
+sh==1.14.2
+binaryornot==0.4.4
+
+# Code quality
+# ------------------------------------------------------------------------------
+black==22.3.0
+isort==5.10.1
+flake8==4.0.1
+flake8-isort==4.1.1
+pre-commit==2.18.1
+
+# Testing
+# ------------------------------------------------------------------------------
+tox==3.24.5
+pytest==7.1.1
+pytest-cookies==0.6.1
+pytest-instafail==0.4.2
+pyyaml==6.0
+
+# Scripting
+# ------------------------------------------------------------------------------
+PyGithub==1.55
+gitpython==3.1.27
+jinja2==3.1.1
+requests==2.27.1

--- a/scripts/create_django_issue.py
+++ b/scripts/create_django_issue.py
@@ -6,21 +6,18 @@ patches, only comparing major and minor version numbers.
 This script handles when there are multiple Django versions that need
 to keep up to date.
 """

 from __future__ import annotations

 import os
 import re
 import sys
 from pathlib import Path
-from typing import TYPE_CHECKING, Any, NamedTuple
+from typing import TYPE_CHECKING, Any, Iterable, NamedTuple

 import requests
 from github import Github

 if TYPE_CHECKING:
-    from collections.abc import Iterable
-
     from github.Issue import Issue

 CURRENT_FILE = Path(__file__)
@@ -50,11 +47,6 @@ class DjVersion(NamedTuple):
         major, minor, *_ = version_str.split(".")
         return cls(major=int(major), minor=int(minor))

-    @classmethod
-    def parse_to_tuple(cls, version_str: str):
-        version = cls.parse(version_str=version_str)
-        return version.major, version.minor
-

 def get_package_info(package: str) -> dict:
     """Get package metadata using PyPI API."""
@@ -83,22 +75,17 @@ def get_name_and_version(requirements_line: str) -> tuple[str, ...]:
     return name_without_extras, version


-def get_all_latest_django_versions(
-    django_max_version: tuple[DjVersion] | None = None,
-) -> tuple[DjVersion, list[DjVersion]]:
+def get_all_latest_django_versions() -> tuple[DjVersion, list[DjVersion]]:
     """
     Grabs all Django versions that are worthy of a GitHub issue.

     Depends on Django versions having higher major version or minor version.
     """
-    _django_max_version = (99, 99)
-    if django_max_version:
-        _django_max_version = django_max_version
-
     print("Fetching all Django versions from PyPI")
     base_txt = REQUIREMENTS_DIR / "base.txt"
     with base_txt.open() as f:
         for line in f.readlines():
-            if "django==" in line.lower():
+            if "django==" in line:
                 break
         else:
             print(f"django not found in {base_txt}")  # Huh...?
@@ -110,7 +97,7 @@ def get_all_latest_django_versions(
     current_minor_version = DjVersion.parse(current_version_str)
     newer_versions: set[DjVersion] = set()
     for django_version in get_django_versions():
-        if current_minor_version < django_version <= _django_max_version:
+        if django_version > current_minor_version:
             newer_versions.add(django_version)

     return current_minor_version, sorted(newer_versions, reverse=True)
@@ -143,7 +130,9 @@ class GitHubManager:
         self.requirements_files = ["base", "local", "production"]
         # Format:
         # requirement file name: {package name: (master_version, package_info)}
-        self.requirements: dict[str, dict[str, tuple[str, dict]]] = {x: {} for x in self.requirements_files}
+        self.requirements: dict[str, dict[str, tuple[str, dict]]] = {
+            x: {} for x in self.requirements_files
+        }

     def setup(self) -> None:
         self.load_requirements()
@@ -154,13 +143,7 @@ class GitHubManager:
         for requirements_file in self.requirements_files:
             with (REQUIREMENTS_DIR / f"{requirements_file}.txt").open() as f:
                 for line in f.readlines():
-                    if (
-                        "==" in line
-                        and not line.startswith("{%")
-                        and not line.startswith(" #")
-                        and not line.startswith("#")
-                        and not line.startswith(" ")
-                    ):
+                    if "==" in line and not line.startswith("{%"):
                         name, version = get_name_and_version(line)
                         self.requirements[requirements_file][name] = (
                             version,
@@ -177,19 +160,26 @@ class GitHubManager:
             "is": "issue",
             "in": "title",
         }
-        issues = list(self.github.search_issues("[Django Update]", "created", "desc", **qualifiers))
+        issues = list(
+            self.github.search_issues(
+                "[Django Update]", "created", "desc", **qualifiers
+            )
+        )
         print(f"Found {len(issues)} issues matching search")
         for issue in issues:
             matches = re.match(r"\[Update Django] Django (\d+.\d+)$", issue.title)
             if not matches:
                 continue
             issue_version = DjVersion.parse(matches.group(1))
-            if self.base_dj_version >= issue_version:
-                self.close_issue(issue)
+            if self.base_dj_version > issue_version:
+                issue.edit(state="closed")
+                print(f"Closed issue {issue.title} (ID: [{issue.id}]({issue.url}))")
             else:
                 self.existing_issues[issue_version] = issue

-    def get_compatibility(self, package_name: str, package_info: dict, needed_dj_version: DjVersion):
+    def get_compatibility(
+        self, package_name: str, package_info: dict, needed_dj_version: DjVersion
+    ):
         """
         Verify compatibility via setup.py classifiers. If Django is not in the
         classifiers, then default compatibility is n/a and OK is ✅.
@@ -202,7 +192,9 @@ class GitHubManager:
         # updated packages, or known releases that will happen but haven't yet
         if issue := self.existing_issues.get(needed_dj_version):
             if index := issue.body.find(package_name):
-                name, _current, prev_compat, ok = (s.strip() for s in issue.body[index:].split("|", 4)[:4])
+                name, _current, prev_compat, ok = [
+                    s.strip() for s in issue.body[index:].split("|", 4)[:4]
+                ]
                 if ok in ("✅", "❓", "🕒"):
                     return prev_compat, ok

@@ -214,7 +206,7 @@ class GitHubManager:
         for classifier in package_info["info"]["classifiers"]:
             # Usually in the form of "Framework :: Django :: 3.2"
             tokens = classifier.split(" ")
-            if len(tokens) >= 5 and tokens[2].lower() == "django" and "." in tokens[4]:
+            if len(tokens) >= 5 and tokens[2].lower() == "django":
                 version = DjVersion.parse(tokens[4])
                 if len(version) == 2:
                     supported_dj_versions.append(version)
@@ -222,7 +214,8 @@ class GitHubManager:
         if supported_dj_versions:
             if any(v >= needed_dj_version for v in supported_dj_versions):
                 return package_info["info"]["version"], "✅"
-            return "", "❌"
+            else:
+                return "", "❌"

         # Django classifier DNE; assume it isn't a Django lib
         # Great exceptions include pylint-django, where we need to do this manually...
@@ -238,7 +231,9 @@ class GitHubManager:
     ]

     def _get_md_home_page_url(self, package_info: dict):
-        urls = [package_info["info"].get(url_key) for url_key in self.HOME_PAGE_URL_KEYS]
+        urls = [
+            package_info["info"].get(url_key) for url_key in self.HOME_PAGE_URL_KEYS
+        ]
         try:
             return f"[{{}}]({next(item for item in urls if item)})"
         except StopIteration:
@@ -247,17 +242,20 @@ class GitHubManager:
     def generate_markdown(self, needed_dj_version: DjVersion):
         requirements = f"{needed_dj_version} requirements tables\n\n"
         for _file in self.requirements_files:
-            requirements += _TABLE_HEADER.format_map({"file": _file, "dj_version": needed_dj_version})
+            requirements += _TABLE_HEADER.format_map(
+                {"file": _file, "dj_version": needed_dj_version}
+            )
             for package_name, (version, info) in self.requirements[_file].items():
-                compat_version, icon = self.get_compatibility(package_name, info, needed_dj_version)
+                compat_version, icon = self.get_compatibility(
+                    package_name, info, needed_dj_version
+                )
                 requirements += (
                     f"| {self._get_md_home_page_url(info).format(package_name)} "
-                    f"| {version.strip()} "
-                    f"| {compat_version.strip()} "
+                    f"| {version} "
+                    f"| {compat_version} "
                     f"| {icon} "
                     f"|\n"
                 )

         return requirements

     def create_or_edit_issue(self, needed_dj_version: DjVersion, description: str):
@@ -266,14 +264,11 @@ class GitHubManager:
             issue.edit(body=description)
         else:
             print(f"Creating new issue for Django {needed_dj_version}")
-            issue = self.repo.create_issue(f"[Update Django] Django {needed_dj_version}", description)
+            issue = self.repo.create_issue(
+                f"[Update Django] Django {needed_dj_version}", description
+            )
             issue.add_to_labels(f"django{needed_dj_version}")

-    @staticmethod
-    def close_issue(issue: Issue):
-        issue.edit(state="closed")
-        print(f"Closed issue {issue.title} (ID: [{issue.id}]({issue.url}))")
-
     def generate(self):
         for version in self.needed_dj_versions:
             print(f"Handling GitHub issue for Django {version}")
@@ -282,27 +277,19 @@ class GitHubManager:
             self.create_or_edit_issue(version, md_content)


-def main(django_max_version=None) -> None:
+def main() -> None:
     # Check if there are any djs
-    current_dj, latest_djs = get_all_latest_django_versions(django_max_version=django_max_version)
-    # Run the setup, which might close old issues
+    current_dj, latest_djs = get_all_latest_django_versions()
+    if not latest_djs:
+        sys.exit(0)
     manager = GitHubManager(current_dj, latest_djs)
     manager.setup()
-
-    if not latest_djs:
-        print("No new Django versions to update. Exiting...")
-        sys.exit(0)
-
     manager.generate()


 if __name__ == "__main__":
     if GITHUB_REPO is None:
-        raise RuntimeError("No github repo, please set the environment variable GITHUB_REPOSITORY")
-    max_version = None
-    last_arg = sys.argv[-1]
-    if CURRENT_FILE.name not in last_arg:
-        max_version = DjVersion.parse_to_tuple(version_str=last_arg)
-
-    main(django_max_version=max_version)
+        raise RuntimeError(
+            "No github repo, please set the environment variable GITHUB_REPOSITORY"
+        )
+    main()
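The version filter above leans on `NamedTuple` inheriting plain tuple ordering, which is why the master side can write the chained comparison `current < version <= cap`. A hedged mini-version (the `parse` classmethod is copied from the diff; the surrounding driver code is illustrative):

```python
# Mini-version of the DjVersion tuple used by the script, showing why tuple
# ordering is enough for the "newer than current, not above the cap" filter.
from typing import NamedTuple


class DjVersion(NamedTuple):
    major: int
    minor: int

    @classmethod
    def parse(cls, version_str: str) -> "DjVersion":
        # "4.2.11" -> DjVersion(major=4, minor=2); the patch level is ignored
        major, minor, *_ = version_str.split(".")
        return cls(major=int(major), minor=int(minor))


current = DjVersion.parse("4.0")
cap = (4, 2)  # what the django_max_version argument would hold
candidates = [DjVersion.parse(v) for v in ["3.2.12", "4.1", "4.2.1", "5.0"]]

# NamedTuple compares like a tuple, so chained comparisons just work:
newer = [v for v in candidates if current < v <= cap]
print(newer)  # [DjVersion(major=4, minor=1), DjVersion(major=4, minor=2)]
```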

@@ -1,69 +0,0 @@
-from __future__ import annotations
-
-import json
-from pathlib import Path
-
-ROOT = Path(__file__).parent.parent
-TEMPLATED_ROOT = ROOT / "{{cookiecutter.project_slug}}"
-DOCKERFILE = TEMPLATED_ROOT / "compose" / "local" / "node" / "Dockerfile"
-PROD_DOCKERFILE = TEMPLATED_ROOT / "compose" / "production" / "django" / "Dockerfile"
-PACKAGE_JSON = TEMPLATED_ROOT / "package.json"
-CI_YML = ROOT / ".github" / "workflows" / "ci.yml"
-
-
-def main() -> None:
-    new_version = get_version_from_dockerfile()
-    old_version = get_version_from_package_json()
-    if old_version != new_version:
-        update_package_json_version(old_version, new_version)
-        update_ci_node_version(old_version, new_version)
-        update_production_node_version(old_version, new_version)
-
-
-def get_version_from_dockerfile() -> str:
-    # Extract version out of base image name:
-    # FROM docker.io/node:22.13-bookworm-slim
-    # -> 22.13
-    with DOCKERFILE.open("r") as f:
-        for line in f:
-            if "FROM docker.io/node:" in line:
-                _, _, docker_tag = line.partition(":")
-                version_str, _, _ = docker_tag.partition("-")
-                return version_str
-    raise RuntimeError("Could not find version in Dockerfile")
-
-
-def get_version_from_package_json() -> str:
-    package_json = json.loads(PACKAGE_JSON.read_text())
-    return package_json["engines"]["node"]
-
-
-def update_package_json_version(old_version: str, new_version: str) -> None:
-    package_json_text = PACKAGE_JSON.read_text()
-    package_json_text = package_json_text.replace(
-        f'"node": "{old_version}"',
-        f'"node": "{new_version}"',
-    )
-    PACKAGE_JSON.write_text(package_json_text)
-
-
-def update_ci_node_version(old_version: str, new_version: str) -> None:
-    yml_content = CI_YML.read_text()
-    yml_content = yml_content.replace(
-        f'node-version: "{old_version}"',
-        f'node-version: "{new_version}"',
-    )
-    CI_YML.write_text(yml_content)
-
-
-def update_production_node_version(old_version: str, new_version: str) -> None:
-    dockerfile_content = PROD_DOCKERFILE.read_text()
-    dockerfile_content = dockerfile_content.replace(
-        f"FROM docker.io/node:{old_version}",
-        f"FROM docker.io/node:{new_version}",
-    )
-    PROD_DOCKERFILE.write_text(dockerfile_content)
-
-
-if __name__ == "__main__":
-    main()
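The Dockerfile parsing in the deleted script relies on `str.partition`, which never raises and always returns a 3-tuple, so no length checks are needed. A standalone illustration:

```python
# How the two partition() calls peel the node version out of a FROM line.
line = "FROM docker.io/node:22.13-bookworm-slim\n"
_, _, docker_tag = line.partition(":")        # "22.13-bookworm-slim\n"
version_str, _, _ = docker_tag.partition("-")  # "22.13"
print(version_str)
```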

@@ -1,56 +0,0 @@
-from __future__ import annotations
-
-import subprocess
-import tomllib
-from pathlib import Path
-
-ROOT = Path(__file__).parent.parent
-TEMPLATED_ROOT = ROOT / "{{cookiecutter.project_slug}}"
-REQUIREMENTS_LOCAL_TXT = TEMPLATED_ROOT / "requirements" / "local.txt"
-PRE_COMMIT_CONFIG = TEMPLATED_ROOT / ".pre-commit-config.yaml"
-PYPROJECT_TOML = ROOT / "pyproject.toml"
-
-
-def main() -> None:
-    new_version = get_requirements_txt_version()
-    old_version = get_pyproject_toml_version()
-    if old_version == new_version:
-        return
-
-    update_ruff_version(old_version, new_version)
-    subprocess.run(["uv", "lock", "--no-upgrade"], cwd=ROOT)
-
-
-def get_requirements_txt_version() -> str:
-    content = REQUIREMENTS_LOCAL_TXT.read_text()
-    for line in content.split("\n"):
-        if line.startswith("ruff"):
-            return line.split(" ")[0].split("==")[1]
-    raise RuntimeError("Could not find ruff version in requirements/local.txt")
-
-
-def get_pyproject_toml_version() -> str:
-    data = tomllib.loads(PYPROJECT_TOML.read_text())
-    for dependency in data["project"]["dependencies"]:
-        if dependency.startswith("ruff=="):
-            return dependency.split("==")[1]
-    raise RuntimeError("Could not find ruff version in pyproject.toml")
-
-
-def update_ruff_version(old_version: str, new_version: str) -> None:
-    # Update pyproject.toml
-    new_content = PYPROJECT_TOML.read_text().replace(
-        f"ruff=={old_version}",
-        f"ruff=={new_version}",
-    )
-    PYPROJECT_TOML.write_text(new_content)
-    # Update pre-commit config
-    new_content = PRE_COMMIT_CONFIG.read_text().replace(
-        f"repo: https://github.com/astral-sh/ruff-pre-commit\n    rev: v{old_version}",
-        f"repo: https://github.com/astral-sh/ruff-pre-commit\n    rev: v{new_version}",
-    )
-    PRE_COMMIT_CONFIG.write_text(new_content)
-
-
-if __name__ == "__main__":
-    main()
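Note that `tomllib` (standard library since Python 3.11) is read-only, which is presumably why the deleted script reads versions via `tomllib.loads` but writes them back with plain text replacement. A minimal sketch of the read path:

```python
# tomllib parses TOML into plain dicts/lists; there is no dumps/writer, so
# round-tripping edits requires a text-level replace as the script does.
import tomllib

data = tomllib.loads('[project]\ndependencies = ["ruff==0.12.5"]\n')
ruff_pin = next(d for d in data["project"]["dependencies"] if d.startswith("ruff=="))
print(ruff_pin.split("==")[1])  # 0.12.5
```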

--- a/scripts/update_changelog.py
+++ b/scripts/update_changelog.py
@@ -1,9 +1,8 @@
 import datetime as dt
 import os
 import re
-import subprocess
-from collections.abc import Iterable
 from pathlib import Path
+from typing import Iterable

 import git
 import github.PullRequest
@@ -33,9 +32,6 @@ def main() -> None:

     # Group pull requests by type of change
     grouped_pulls = group_pulls_by_change_type(merged_pulls)
-    if not any(grouped_pulls.values()):
-        print("Pull requests merged aren't worth a changelog mention.")
-        return

     # Generate portion of markdown
     release_changes_summary = generate_md(grouped_pulls)
@@ -48,16 +44,12 @@ def main() -> None:
     print(f"Wrote {changelog_path}")

     # Update version
-    setup_py_path = ROOT / "pyproject.toml"
+    setup_py_path = ROOT / "setup.py"
     update_version(setup_py_path, release)
     print(f"Updated version in {setup_py_path}")

-    # Run uv lock
-    uv_lock_path = ROOT / "uv.lock"
-    subprocess.run(["uv", "lock", "--no-upgrade"], cwd=ROOT)
-
     # Commit changes, create tag and push
-    update_git_repo([changelog_path, setup_py_path, uv_lock_path], release)
+    update_git_repo([changelog_path, setup_py_path], release)

     # Create GitHub release
     github_release = repo.create_git_release(
@@ -90,20 +82,14 @@ def group_pulls_by_change_type(
     grouped_pulls = {
         "Changed": [],
         "Fixed": [],
-        "Documentation": [],
         "Updated": [],
     }
     for pull in pull_requests_list:
         label_names = {label.name for label in pull.labels}
-        if "project infrastructure" in label_names:
-            # Don't mention it in the changelog
-            continue
         if "update" in label_names:
             group_name = "Updated"
         elif "bug" in label_names:
             group_name = "Fixed"
-        elif "docs" in label_names:
-            group_name = "Documentation"
         else:
             group_name = "Changed"
         grouped_pulls[group_name].append(pull)
@@ -129,7 +115,7 @@ def write_changelog(file_path: Path, release: str, content: str) -> None:


 def update_version(file_path: Path, release: str) -> None:
-    """Update template version in pyproject.toml."""
+    """Update template version in setup.py."""
     old_content = file_path.read_text()
     updated_content = re.sub(
         r'\nversion = "\d+\.\d+\.\d+"\n',
@@ -162,7 +148,11 @@ def update_git_repo(paths: list[Path], release: str) -> None:

 if __name__ == "__main__":
     if GITHUB_REPO is None:
-        raise RuntimeError("No github repo, please set the environment variable GITHUB_REPOSITORY")
+        raise RuntimeError(
+            "No github repo, please set the environment variable GITHUB_REPOSITORY"
+        )
     if GIT_BRANCH is None:
-        raise RuntimeError("No git branch set, please set the GITHUB_REF_NAME environment variable")
+        raise RuntimeError(
+            "No git branch set, please set the GITHUB_REF_NAME environment variable"
+        )
     main()
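The regex in `update_version()` targets the whole `version = "..."` line, which is why the same function works against both `setup.py` (old side) and `pyproject.toml` (master side). A hedged sketch of the behaviour; the replacement string is an assumption, since the diff only shows the pattern line:

```python
# Illustrative version bump using the script's pattern; the calendar-style
# version "2022.04.05" matches \d+\.\d+\.\d+ just like a semver triple would.
import re

old_content = 'name = "cookiecutter-django"\nversion = "2022.04.05"\n'
release = "2022.05.01"  # illustrative next release
updated = re.sub(
    r'\nversion = "\d+\.\d+\.\d+"\n',
    f'\nversion = "{release}"\n',
    old_content,
)
print(updated)
```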
--- a/scripts/update_contributors.py
+++ b/scripts/update_contributors.py
@@ -40,13 +40,19 @@ def iter_recent_authors():
     """
     Fetch users who opened recently merged pull requests.

-    Use GitHub API to fetch recent authors rather than
-    git CLI to work with GitHub usernames.
+    Use Github API to fetch recent authors rather than
+    git CLI to work with Github usernames.
     """
     repo = Github(login_or_token=GITHUB_TOKEN, per_page=5).get_repo(GITHUB_REPO)
-    recent_pulls = repo.get_pulls(state="closed", sort="updated", direction="desc").get_page(0)
+    recent_pulls = repo.get_pulls(
+        state="closed", sort="updated", direction="desc"
+    ).get_page(0)
     for pull in recent_pulls:
-        if pull.merged and pull.user.type == "User" and pull.user.login not in BOT_LOGINS:
+        if (
+            pull.merged
+            and pull.user.type == "User"
+            and pull.user.login not in BOT_LOGINS
+        ):
             yield pull.user


@@ -90,7 +96,9 @@ def write_md_file(contributors):
     core_contributors = [c for c in contributors if c.get("is_core", False)]
     other_contributors = (c for c in contributors if not c.get("is_core", False))
     other_contributors = sorted(other_contributors, key=lambda c: c["name"].lower())
-    content = template.render(core_contributors=core_contributors, other_contributors=other_contributors)
+    content = template.render(
+        core_contributors=core_contributors, other_contributors=other_contributors
+    )

     file_path = ROOT / "CONTRIBUTORS.md"
     file_path.write_text(content)
@@ -98,5 +106,7 @@ def write_md_file(contributors):

 if __name__ == "__main__":
     if GITHUB_REPO is None:
-        raise RuntimeError("No github repo, please set the environment variable GITHUB_REPOSITORY")
+        raise RuntimeError(
+            "No github repo, please set the environment variable GITHUB_REPOSITORY"
+        )
     main()
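The `per_page=5` plus `get_page(0)` combination above is what caps the scan at the five most recently updated pull requests. A hedged sketch against PyGithub's API (the token is a placeholder; a real one is needed for the call to succeed):

```python
# Sketch (assumes PyGithub is installed): per_page sets the page size for all
# paginated results from this client, and get_page(0) fetches only the first
# page, so at most 5 pull requests are ever retrieved.
from github import Github

gh = Github(login_or_token="<token>", per_page=5)  # placeholder token
repo = gh.get_repo("cookiecutter/cookiecutter-django")
pulls = repo.get_pulls(state="closed", sort="updated", direction="desc")
first_page = pulls.get_page(0)
print(len(first_page))  # at most 5
```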
--- /dev/null
+++ b/setup.cfg
@@ -0,0 +1,7 @@
+[flake8]
+exclude = docs
+max-line-length = 88
+
+[isort]
+profile = black
+known_first_party = tests,scripts,hooks

--- /dev/null
+++ b/setup.py
@@ -0,0 +1,44 @@
+#!/usr/bin/env python
+try:
+    from setuptools import setup
+except ImportError:
+    from distutils.core import setup
+
+# We use calendar versioning
+version = "2022.04.05"
+
+with open("README.rst") as readme_file:
+    long_description = readme_file.read()
+
+setup(
+    name="cookiecutter-django",
+    version=version,
+    description=(
+        "A Cookiecutter template for creating production-ready "
+        "Django projects quickly."
+    ),
+    long_description=long_description,
+    author="Daniel Roy Greenfeld",
+    author_email="pydanny@gmail.com",
+    url="https://github.com/cookiecutter/cookiecutter-django",
+    packages=[],
+    license="BSD",
+    zip_safe=False,
+    classifiers=[
+        "Development Status :: 4 - Beta",
+        "Environment :: Console",
+        "Framework :: Django :: 3.2",
+        "Intended Audience :: Developers",
+        "Natural Language :: English",
+        "License :: OSI Approved :: BSD License",
+        "Programming Language :: Python",
+        "Programming Language :: Python :: 3",
+        "Programming Language :: Python :: 3.9",
+        "Programming Language :: Python :: Implementation :: CPython",
+        "Topic :: Software Development",
+    ],
+    keywords=(
+        "cookiecutter, Python, projects, project templates, django, "
+        "skeleton, scaffolding, project directory, setup.py"
+    ),
+)

--- a/tests/test_bare.sh
+++ b/tests/test_bare.sh
@@ -11,7 +11,7 @@ mkdir -p .cache/bare
 cd .cache/bare

 # create the project using the default settings in cookiecutter.json
-uv run cookiecutter ../../ --no-input --overwrite-if-exists use_docker=n "$@"
+cookiecutter ../../ --no-input --overwrite-if-exists use_docker=n "$@"
 cd my_awesome_project

 # Install OS deps
@@ -20,17 +20,25 @@ sudo utility/install_os_dependencies.sh install
 # Install Python deps
 pip install -r requirements/local.txt

+# Lint by running pre-commit on all files
+# Needs a git repo to find the project root
+git init
+git add .
+pre-commit run --show-diff-on-failure -a
+
 # run the project's tests
 pytest

 # Make sure the check doesn't raise any warnings
 python manage.py check --fail-level WARNING

-# Run npm build script if package.json is present
 if [ -f "package.json" ]
 then
     npm install
-    npm run build
+    if [ -f "gulpfile.js" ]
+    then
+        npm run build
+    fi
 fi

 # Generate the HTML for the documentation

--- a/tests/test_cookiecutter_generation.py
+++ b/tests/test_cookiecutter_generation.py
@@ -1,9 +1,6 @@
-import glob
 import os
 import re
 import sys
-from collections.abc import Iterable
-from pathlib import Path

 import pytest

@@ -23,12 +20,6 @@ if sys.platform.startswith("win"):
 elif sys.platform.startswith("darwin") and os.getenv("CI"):
     pytest.skip("skipping slow macOS tests on CI", allow_module_level=True)

-# Run auto-fixable styles checks - skipped on CI by default. These can be fixed
-# automatically by running pre-commit after generation however they are tedious
-# to fix in the template, so we don't insist too much in fixing them.
-AUTOFIXABLE_STYLES = os.getenv("AUTOFIXABLE_STYLES") == "1"
-auto_fixable = pytest.mark.skipif(not AUTOFIXABLE_STYLES, reason="auto-fixable")
-

 @pytest.fixture
 def context():
@@ -45,8 +36,6 @@ def context():


 SUPPORTED_COMBINATIONS = [
-    {"username_type": "username"},
-    {"username_type": "email"},
     {"open_source_license": "MIT"},
     {"open_source_license": "BSD"},
     {"open_source_license": "GPLv3"},
@@ -54,28 +43,25 @@ SUPPORTED_COMBINATIONS = [
     {"open_source_license": "Not open source"},
     {"windows": "y"},
     {"windows": "n"},
-    {"editor": "None"},
-    {"editor": "PyCharm"},
-    {"editor": "VS Code"},
+    {"use_pycharm": "y"},
+    {"use_pycharm": "n"},
     {"use_docker": "y"},
     {"use_docker": "n"},
-    {"postgresql_version": "17"},
-    {"postgresql_version": "16"},
-    {"postgresql_version": "15"},
-    {"postgresql_version": "14"},
-    {"postgresql_version": "13"},
+    {"postgresql_version": "14.1"},
+    {"postgresql_version": "13.5"},
+    {"postgresql_version": "12.9"},
+    {"postgresql_version": "11.14"},
+    {"postgresql_version": "10.19"},
     {"cloud_provider": "AWS", "use_whitenoise": "y"},
     {"cloud_provider": "AWS", "use_whitenoise": "n"},
     {"cloud_provider": "GCP", "use_whitenoise": "y"},
     {"cloud_provider": "GCP", "use_whitenoise": "n"},
-    {"cloud_provider": "Azure", "use_whitenoise": "y"},
-    {"cloud_provider": "Azure", "use_whitenoise": "n"},
     {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mailgun"},
     {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mailjet"},
     {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mandrill"},
     {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Postmark"},
     {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Sendgrid"},
-    {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Brevo"},
+    {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "SendinBlue"},
     {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "SparkPost"},
     {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Other SMTP"},
     # Note: cloud_provider=None AND use_whitenoise=n is not supported
@@ -85,7 +71,7 @@ SUPPORTED_COMBINATIONS = [
     {"cloud_provider": "AWS", "mail_service": "Mandrill"},
     {"cloud_provider": "AWS", "mail_service": "Postmark"},
     {"cloud_provider": "AWS", "mail_service": "Sendgrid"},
-    {"cloud_provider": "AWS", "mail_service": "Brevo"},
+    {"cloud_provider": "AWS", "mail_service": "SendinBlue"},
     {"cloud_provider": "AWS", "mail_service": "SparkPost"},
     {"cloud_provider": "AWS", "mail_service": "Other SMTP"},
     {"cloud_provider": "GCP", "mail_service": "Mailgun"},
@@ -93,31 +79,21 @@ SUPPORTED_COMBINATIONS = [
     {"cloud_provider": "GCP", "mail_service": "Mailjet"},
     {"cloud_provider": "GCP", "mail_service": "Mandrill"},
     {"cloud_provider": "GCP", "mail_service": "Postmark"},
     {"cloud_provider": "GCP", "mail_service": "Sendgrid"},
-    {"cloud_provider": "GCP", "mail_service": "Brevo"},
+    {"cloud_provider": "GCP", "mail_service": "SendinBlue"},
     {"cloud_provider": "GCP", "mail_service": "SparkPost"},
     {"cloud_provider": "GCP", "mail_service": "Other SMTP"},
-    {"cloud_provider": "Azure", "mail_service": "Mailgun"},
-    {"cloud_provider": "Azure", "mail_service": "Mailjet"},
-    {"cloud_provider": "Azure", "mail_service": "Mandrill"},
-    {"cloud_provider": "Azure", "mail_service": "Postmark"},
-    {"cloud_provider": "Azure", "mail_service": "Sendgrid"},
-    {"cloud_provider": "Azure", "mail_service": "Brevo"},
-    {"cloud_provider": "Azure", "mail_service": "SparkPost"},
-    {"cloud_provider": "Azure", "mail_service": "Other SMTP"},
-    # Note: cloud_providers GCP, Azure, and None
-    # with mail_service Amazon SES is not supported
+    # Note: cloud_providers GCP and None with mail_service Amazon SES is not supported
     {"use_async": "y"},
     {"use_async": "n"},
     {"use_drf": "y"},
     {"use_drf": "n"},
     {"frontend_pipeline": "None"},
-    {"frontend_pipeline": "Django Compressor"},
+    {"frontend_pipeline": "django-compressor"},
     {"frontend_pipeline": "Gulp"},
-    {"frontend_pipeline": "Webpack"},
     {"use_celery": "y"},
     {"use_celery": "n"},
-    {"use_mailpit": "y"},
-    {"use_mailpit": "n"},
+    {"use_mailhog": "y"},
+    {"use_mailhog": "n"},
     {"use_sentry": "y"},
     {"use_sentry": "n"},
     {"use_whitenoise": "y"},
@@ -128,7 +104,6 @@ SUPPORTED_COMBINATIONS = [
     {"ci_tool": "Travis"},
     {"ci_tool": "Gitlab"},
     {"ci_tool": "Github"},
-    {"ci_tool": "Drone"},
     {"keep_local_envs_in_vcs": "y"},
     {"keep_local_envs_in_vcs": "n"},
     {"debug": "y"},
@@ -138,7 +113,6 @@ SUPPORTED_COMBINATIONS = [
 UNSUPPORTED_COMBINATIONS = [
     {"cloud_provider": "None", "use_whitenoise": "n"},
     {"cloud_provider": "GCP", "mail_service": "Amazon SES"},
-    {"cloud_provider": "Azure", "mail_service": "Amazon SES"},
     {"cloud_provider": "None", "mail_service": "Amazon SES"},
 ]

@@ -148,19 +122,23 @@ def _fixture_id(ctx):
     return "-".join(f"{key}:{value}" for key, value in ctx.items())


-def build_files_list(base_path: Path):
+def build_files_list(root_dir):
     """Build a list containing absolute paths to the generated files."""
-    return [dirpath / file_path for dirpath, subdirs, files in base_path.walk() for file_path in files]
+    return [
+        os.path.join(dirpath, file_path)
+        for dirpath, subdirs, files in os.walk(root_dir)
+        for file_path in files
+    ]


-def check_paths(paths: Iterable[Path]):
+def check_paths(paths):
     """Method to check all paths have correct substitutions."""
     # Assert that no match is found in any of the files
     for path in paths:
-        if is_binary(str(path)):
+        if is_binary(path):
             continue

-        for line in path.open():
+        for line in open(path, "r"):
             match = RE_OBJ.search(line)
             assert match is None, f"cookiecutter variable not replaced in {path}"

@@ -175,31 +153,33 @@ def test_project_generation(cookies, context, context_override):
     assert result.project_path.name == context["project_slug"]
     assert result.project_path.is_dir()

-    paths = build_files_list(result.project_path)
+    paths = build_files_list(str(result.project_path))
     assert paths
     check_paths(paths)


 @pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
-def test_ruff_check_passes(cookies, context_override):
-    """Generated project should pass ruff check."""
+def test_flake8_passes(cookies, context_override):
+    """Generated project should pass flake8."""
     result = cookies.bake(extra_context=context_override)

     try:
-        sh.ruff("check", ".", _cwd=str(result.project_path))
+        sh.flake8(_cwd=str(result.project_path))
     except sh.ErrorReturnCode as e:
         pytest.fail(e.stdout.decode())


-@auto_fixable
 @pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
-def test_ruff_format_passes(cookies, context_override):
-    """Check whether generated project passes ruff format."""
+def test_black_passes(cookies, context_override):
+    """Generated project should pass black."""
     result = cookies.bake(extra_context=context_override)

     try:
-        sh.ruff(
-            "format",
+        sh.black(
+            "--check",
+            "--diff",
+            "--exclude",
+            "migrations",
             ".",
             _cwd=str(result.project_path),
         )
@@ -207,76 +187,11 @@ def test_ruff_format_passes(cookies, context_override):
         pytest.fail(e.stdout.decode())


-@auto_fixable
-@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
-def test_isort_passes(cookies, context_override):
-    """Check whether generated project passes isort style."""
-    result = cookies.bake(extra_context=context_override)
-
-    try:
-        sh.isort(_cwd=str(result.project_path))
-    except sh.ErrorReturnCode as e:
-        pytest.fail(e.stdout.decode())
-
-
-@auto_fixable
-@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
-def test_django_upgrade_passes(cookies, context_override):
-    """Check whether generated project passes django-upgrade."""
-    result = cookies.bake(extra_context=context_override)
-
-    python_files = [
-        file_path.removeprefix(f"{result.project_path}/")
-        for file_path in glob.glob(str(result.project_path / "**" / "*.py"), recursive=True)
-    ]
-    try:
-        sh.django_upgrade(
-            "--target-version",
-            "5.0",
-            *python_files,
-            _cwd=str(result.project_path),
-        )
-    except sh.ErrorReturnCode as e:
-        pytest.fail(e.stdout.decode())
-
-
-@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
-def test_djlint_lint_passes(cookies, context_override):
-    """Check whether generated project passes djLint --lint."""
-    result = cookies.bake(extra_context=context_override)
-
-    autofixable_rules = "H014,T001"
-    # TODO: remove T002 when fixed https://github.com/Riverside-Healthcare/djLint/issues/687
-    ignored_rules = "H006,H030,H031,T002"
-    try:
-        sh.djlint(
-            "--lint",
-            "--ignore",
-            f"{autofixable_rules},{ignored_rules}",
-            ".",
-            _cwd=str(result.project_path),
-        )
-    except sh.ErrorReturnCode as e:
-        pytest.fail(e.stdout.decode())
-
-
-@auto_fixable
-@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
-def test_djlint_check_passes(cookies, context_override):
-    """Check whether generated project passes djLint --check."""
-    result = cookies.bake(extra_context=context_override)
-
-    try:
-        sh.djlint("--check", ".", _cwd=str(result.project_path))
-    except sh.ErrorReturnCode as e:
-        pytest.fail(e.stdout.decode())
-
-
 @pytest.mark.parametrize(
-    ("use_docker", "expected_test_script"),
+    ["use_docker", "expected_test_script"],
     [
         ("n", "pytest"),
-        ("y", "docker compose -f docker-compose.local.yml run django pytest"),
+        ("y", "docker-compose -f local.yml run django pytest"),
     ],
 )
 def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_script):
@@ -288,23 +203,25 @@ def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_scrip
     assert result.project_path.name == context["project_slug"]
     assert result.project_path.is_dir()

-    with (result.project_path / ".travis.yml").open() as travis_yml:
+    with open(f"{result.project_path}/.travis.yml", "r") as travis_yml:
         try:
             yml = yaml.safe_load(travis_yml)["jobs"]["include"]
-            assert yml[0]["script"] == ["ruff check ."]
+            assert yml[0]["script"] == ["flake8"]
             assert yml[1]["script"] == [expected_test_script]
         except yaml.YAMLError as e:
             pytest.fail(str(e))


 @pytest.mark.parametrize(
-    ("use_docker", "expected_test_script"),
+    ["use_docker", "expected_test_script"],
     [
         ("n", "pytest"),
-        ("y", "docker compose -f docker-compose.local.yml run django pytest"),
+        ("y", "docker-compose -f local.yml run django pytest"),
     ],
 )
-def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expected_test_script):
+def test_gitlab_invokes_flake8_and_pytest(
+    cookies, context, use_docker, expected_test_script
+):
     context.update({"ci_tool": "Gitlab", "use_docker": use_docker})
     result = cookies.bake(extra_context=context)

@@ -313,25 +230,25 @@ def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expec
     assert result.project_path.name == context["project_slug"]
     assert result.project_path.is_dir()

-    with (result.project_path / ".gitlab-ci.yml").open() as gitlab_yml:
+    with open(f"{result.project_path}/.gitlab-ci.yml", "r") as gitlab_yml:
         try:
             gitlab_config = yaml.safe_load(gitlab_yml)
-            assert gitlab_config["precommit"]["script"] == [
-                "pre-commit run --show-diff-on-failure --color=always --all-files",
-            ]
+            assert gitlab_config["flake8"]["script"] == ["flake8"]
             assert gitlab_config["pytest"]["script"] == [expected_test_script]
         except yaml.YAMLError as e:
             pytest.fail(e)


 @pytest.mark.parametrize(
-    ("use_docker", "expected_test_script"),
+    ["use_docker", "expected_test_script"],
     [
         ("n", "pytest"),
-        ("y", "docker compose -f docker-compose.local.yml run django pytest"),
+        ("y", "docker-compose -f local.yml run django pytest"),
     ],
 )
-def test_github_invokes_linter_and_pytest(cookies, context, use_docker, expected_test_script):
+def test_github_invokes_linter_and_pytest(
+    cookies, context, use_docker, expected_test_script
+):
     context.update({"ci_tool": "Github", "use_docker": use_docker})
     result = cookies.bake(extra_context=context)

@@ -340,7 +257,7 @@ def test_github_invokes_linter_and_pytest(cookies, context, use_docker, expected
     assert result.project_path.name == context["project_slug"]
     assert result.project_path.is_dir()

-    with (result.project_path / ".github" / "workflows" / "ci.yml").open() as github_yml:
+    with open(f"{result.project_path}/.github/workflows/ci.yml", "r") as github_yml:
         try:
             github_config = yaml.safe_load(github_yml)
             linter_present = False
@@ -380,37 +297,17 @@ def test_error_if_incompatible(cookies, context, invalid_context):


 @pytest.mark.parametrize(
-    ("editor", "pycharm_docs_exist"),
+    ["use_pycharm", "pycharm_docs_exist"],
     [
-        ("None", False),
-        ("PyCharm", True),
-        ("VS Code", False),
+        ("n", False),
+        ("y", True),
     ],
 )
-def test_pycharm_docs_removed(cookies, context, editor, pycharm_docs_exist):
+def test_pycharm_docs_removed(cookies, context, use_pycharm, pycharm_docs_exist):
|
||||||
context.update({"editor": editor})
|
"""."""
|
||||||
|
context.update({"use_pycharm": use_pycharm})
|
||||||
result = cookies.bake(extra_context=context)
|
result = cookies.bake(extra_context=context)
|
||||||
|
|
||||||
index_rst = result.project_path / "docs" / "index.rst"
|
with open(f"{result.project_path}/docs/index.rst", "r") as f:
|
||||||
has_pycharm_docs = "pycharm/configuration" in index_rst.read_text()
|
has_pycharm_docs = "pycharm/configuration" in f.read()
|
||||||
assert has_pycharm_docs is pycharm_docs_exist
|
assert has_pycharm_docs is pycharm_docs_exist
|
||||||
|
|
||||||
|
|
||||||
def test_trim_domain_email(cookies, context):
|
|
||||||
"""Check that leading and trailing spaces are trimmed in domain and email."""
|
|
||||||
context.update(
|
|
||||||
{
|
|
||||||
"use_docker": "y",
|
|
||||||
"domain_name": " example.com ",
|
|
||||||
"email": " me@example.com ",
|
|
||||||
},
|
|
||||||
)
|
|
||||||
result = cookies.bake(extra_context=context)
|
|
||||||
|
|
||||||
assert result.exit_code == 0
|
|
||||||
|
|
||||||
prod_django_env = result.project_path / ".envs" / ".production" / ".django"
|
|
||||||
assert "DJANGO_ALLOWED_HOSTS=.example.com" in prod_django_env.read_text()
|
|
||||||
|
|
||||||
base_settings = result.project_path / "config" / "settings" / "base.py"
|
|
||||||
assert '"me@example.com"' in base_settings.read_text()
|
|
||||||
|
|
|
@@ -11,41 +11,33 @@ mkdir -p .cache/docker
 cd .cache/docker

 # create the project using the default settings in cookiecutter.json
-uv run cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y "$@"
+cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y "$@"
 cd my_awesome_project

+# Lint by running pre-commit on all files
+# Needs a git repo to find the project root
+# We don't have git inside Docker, so run it outside
+git init
+git add .
+pre-commit run --show-diff-on-failure -a

 # make sure all images build
-docker compose -f docker-compose.local.yml build
+docker-compose -f local.yml build

 # run the project's type checks
-docker compose -f docker-compose.local.yml run --rm django mypy my_awesome_project
+docker-compose -f local.yml run django mypy my_awesome_project

 # run the project's tests
-docker compose -f docker-compose.local.yml run --rm django pytest
+docker-compose -f local.yml run django pytest

 # return non-zero status code if there are migrations that have not been created
-docker compose -f docker-compose.local.yml run --rm django python manage.py makemigrations --check || { echo "ERROR: there were changes in the models, but migration listed above have not been created and are not saved in version control"; exit 1; }
+docker-compose -f local.yml run django python manage.py makemigrations --dry-run --check || { echo "ERROR: there were changes in the models, but migration listed above have not been created and are not saved in version control"; exit 1; }

 # Test support for translations
-docker compose -f docker-compose.local.yml run --rm django python manage.py makemessages --all
+docker-compose -f local.yml run django python manage.py makemessages --all

 # Make sure the check doesn't raise any warnings
-docker compose -f docker-compose.local.yml run --rm \
-  -e DJANGO_SECRET_KEY="$(openssl rand -base64 64)" \
-  -e REDIS_URL=redis://redis:6379/0 \
-  -e DJANGO_AWS_ACCESS_KEY_ID=x \
-  -e DJANGO_AWS_SECRET_ACCESS_KEY=x \
-  -e DJANGO_AWS_STORAGE_BUCKET_NAME=x \
-  -e DJANGO_ADMIN_URL=x \
-  -e MAILGUN_API_KEY=x \
-  -e MAILGUN_DOMAIN=x \
-  django python manage.py check --settings=config.settings.production --deploy --database default --fail-level WARNING
+docker-compose -f local.yml run django python manage.py check --fail-level WARNING

 # Generate the HTML for the documentation
-docker compose -f docker-compose.docs.yml run --rm docs make html
-
-# Run npm build script if package.json is present
-if [ -f "package.json" ]
-then
-    docker compose -f docker-compose.local.yml run --rm node npm run build
-fi
+docker-compose -f local.yml run docs make html
@@ -1,5 +1,4 @@
 """Unit tests for the hooks"""

 import os
 from pathlib import Path

@@ -8,7 +7,7 @@ import pytest
 from hooks.post_gen_project import append_to_gitignore_file


-@pytest.fixture
+@pytest.fixture()
 def working_directory(tmp_path):
     prev_cwd = Path.cwd()
     os.chdir(tmp_path)
@@ -23,5 +22,7 @@ def test_append_to_gitignore_file(working_directory):
     gitignore_file.write_text("node_modules/\n")
     append_to_gitignore_file(".envs/*")
     linesep = os.linesep.encode()
-    assert gitignore_file.read_bytes() == b"node_modules/" + linesep + b".envs/*" + linesep
+    assert (
+        gitignore_file.read_bytes() == b"node_modules/" + linesep + b".envs/*" + linesep
+    )
     assert gitignore_file.read_text() == "node_modules/\n.envs/*\n"
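For context, the hook helper exercised by both sides of this hunk appends one pattern per call and relies on text-mode newline translation, which is why the assertion compares bytes against `os.linesep`. A minimal sketch consistent with that contract (illustrative only, not necessarily the exact body in `hooks/post_gen_project.py`):

```python
# Sketch only: mirrors the behaviour the tests above assert.
def append_to_gitignore_file(ignored_line: str) -> None:
    # Text append mode translates "\n" to os.linesep on write,
    # matching the read_bytes() assertion in the test.
    with open(".gitignore", "a") as gitignore_file:
        gitignore_file.write(ignored_line)
        gitignore_file.write("\n")
```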
8 tox.ini
@@ -1,11 +1,11 @@
 [tox]
 skipsdist = true
-envlist = py312,black-template
+envlist = py39,black-template

 [testenv]
-passenv = AUTOFIXABLE_STYLES
-commands = pytest -n auto {posargs:./tests}
+deps = -rrequirements.txt
+commands = pytest {posargs:./tests}

 [testenv:black-template]
 deps = black
-commands = black --check hooks tests docs scripts
+commands = black --check hooks tests setup.py docs scripts
@@ -1,20 +0,0 @@
-
-#
-# .bashrc.override.sh
-#
-
-# persistent bash history
-HISTFILE=~/.bash_history
-PROMPT_COMMAND="history -a; $PROMPT_COMMAND"
-
-# set some django env vars
-source /entrypoint
-
-# restore default shell options
-set +o errexit
-set +o pipefail
-set +o nounset
-
-# start ssh-agent
-# https://code.visualstudio.com/docs/remote/troubleshooting
-eval "$(ssh-agent -s)"
@@ -1,70 +0,0 @@
-// For format details, see https://containers.dev/implementors/json_reference/
-{
-    "name": "{{cookiecutter.project_slug}}_dev",
-    "dockerComposeFile": [
-        "../docker-compose.local.yml"
-    ],
-    "init": true,
-    "mounts": [
-        {
-            "source": "./.devcontainer/bash_history",
-            "target": "/home/dev-user/.bash_history",
-            "type": "bind"
-        },
-        {
-            "source": "~/.ssh",
-            "target": "/home/dev-user/.ssh",
-            "type": "bind"
-        }
-    ],
-    // Tells devcontainer.json supporting services / tools whether they should run
-    // /bin/sh -c "while sleep 1000; do :; done" when starting the container instead of the container’s default command
-    "overrideCommand": false,
-    "service": "django",
-    // "remoteEnv": {"PATH": "/home/dev-user/.local/bin:${containerEnv:PATH}"},
-    "remoteUser": "dev-user",
-    "workspaceFolder": "/app",
-    // Set *default* container specific settings.json values on container create.
-    "customizations": {
-        {%- if cookiecutter.editor == "VS Code" %}
-        "vscode": {
-            "settings": {
-                "editor.formatOnSave": true,
-                "[python]": {
-                    "analysis.autoImportCompletions": true,
-                    "analysis.typeCheckingMode": "basic",
-                    "defaultInterpreterPath": "/usr/local/bin/python",
-                    "editor.codeActionsOnSave": {
-                        "source.organizeImports": "always"
-                    },
-                    "editor.defaultFormatter": "charliermarsh.ruff",
-                    "languageServer": "Pylance",
-                    "linting.enabled": true,
-                    "linting.mypyEnabled": true,
-                    "linting.mypyPath": "/usr/local/bin/mypy",
-                }
-            },
-            // https://code.visualstudio.com/docs/remote/devcontainerjson-reference#_vs-code-specific-properties
-            // Add the IDs of extensions you want installed when the container is created.
-            "extensions": [
-                "davidanson.vscode-markdownlint",
-                "mrmlnc.vscode-duplicate",
-                "visualstudioexptteam.vscodeintellicode",
-                "visualstudioexptteam.intellicode-api-usage-examples",
-                // python
-                "ms-python.python",
-                "ms-python.vscode-pylance",
-                "charliermarsh.ruff",
-                // django
-                "batisteo.vscode-django"
-            ]
-        }
-        {%- endif %}
-    },
-    // Uncomment the next line if you want start specific services in your Docker Compose config.
-    // "runServices": [],
-    // Uncomment the next line if you want to keep your containers running after VS Code shuts down.
-    // "shutdownAction": "none",
-    // Uncomment the next line to run commands after the container is created.
-    "postCreateCommand": "cat .devcontainer/bashrc.override.sh >> ~/.bashrc"
-}
@@ -8,5 +8,3 @@
 .readthedocs.yml
 .travis.yml
 venv
-.git
-.envs/
@@ -1,49 +0,0 @@
-kind: pipeline
-name: default
-
-environment:
-  POSTGRES_USER: '{{ cookiecutter.project_slug }}'
-  POSTGRES_PASSWORD: ''
-  POSTGRES_DB: 'test_{{ cookiecutter.project_slug }}'
-  POSTGRES_HOST_AUTH_METHOD: trust
-{%- if cookiecutter.use_celery == 'y' %}
-  REDIS_URL: 'redis://redis:6379/0'
-{%- endif %}
-
-steps:
-  - name: lint
-    pull: if-not-exists
-    image: python:3.12
-    environment:
-      PRE_COMMIT_HOME: ${CI_PROJECT_DIR}/.cache/pre-commit
-    volumes:
-      - name: pre-commit cache
-        path: ${PRE_COMMIT_HOME}
-    commands:
-      - export PRE_COMMIT_HOME=$CI_PROJECT_DIR/.cache/pre-commit
-      - pip install -q pre-commit
-      - pre-commit run --show-diff-on-failure --color=always --all-files
-
-  - name: test
-    pull: if-not-exists
-{%- if cookiecutter.use_docker == 'y' %}
-    image: docker:25.0
-    environment:
-      DATABASE_URL: pgsql://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres/$POSTGRES_DB
-    commands:
-      - docker-compose -f docker-compose.local.yml build
-      - docker-compose -f docker-compose.docs.yml build
-      - docker-compose -f docker-compose.local.yml run --rm django python manage.py migrate
-      - docker-compose -f docker-compose.local.yml up -d
-      - docker-compose -f docker-compose.local.yml run django pytest
-{%- else %}
-    image: python:3.12
-    commands:
-      - pip install -r requirements/local.txt
-      - pytest
-{%- endif%}
-
-volumes:
-  - name: pre-commit cache
-    host:
-      path: /tmp/drone/cache/pre-commit
@@ -12,7 +12,7 @@ trim_trailing_whitespace = true
 indent_style = space
 indent_size = 4

-[*.{html,css,scss,json,yml,xml,toml}]
+[*.{html,css,scss,json,yml,xml}]
 indent_style = space
 indent_size = 2

@@ -22,6 +22,6 @@ trim_trailing_whitespace = false
 [Makefile]
 indent_style = tab

-[default.conf]
+[nginx.conf]
 indent_style = space
 indent_size = 2
@@ -28,8 +28,8 @@ POSTMARK_SERVER_TOKEN=
 SENDGRID_API_KEY=
 SENDGRID_GENERATE_MESSAGE_ID=True
 SENDGRID_MERGE_FIELD_FORMAT=None
-{% elif cookiecutter.mail_service == 'Brevo' %}
-BREVO_API_KEY=
+{% elif cookiecutter.mail_service == 'SendinBlue' %}
+SENDINBLUE_API_KEY=
 {% elif cookiecutter.mail_service == 'SparkPost' %}
 SPARKPOST_API_KEY=
 {% endif %}
@@ -44,12 +44,6 @@ DJANGO_AWS_STORAGE_BUCKET_NAME=
 # ------------------------------------------------------------------------------
 GOOGLE_APPLICATION_CREDENTIALS=
 DJANGO_GCP_STORAGE_BUCKET_NAME=
-{% elif cookiecutter.cloud_provider == 'Azure' %}
-# Azure
-# ------------------------------------------------------------------------------
-DJANGO_AZURE_ACCOUNT_KEY=
-DJANGO_AZURE_ACCOUNT_NAME=
-DJANGO_AZURE_CONTAINER_NAME=
 {% endif %}
 # django-allauth
 # ------------------------------------------------------------------------------
127 {{cookiecutter.project_slug}}/.github/dependabot.yml vendored
@@ -1,86 +1,95 @@
 # Config for Dependabot updates. See Documentation here:
-# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
+# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates

 version: 2
 updates:
   # Update GitHub actions in workflows
-  - package-ecosystem: 'github-actions'
-    directory: '/'
-    # Every weekday
+  - package-ecosystem: "github-actions"
+    directory: "/"
+    # Check for updates to GitHub Actions every weekday
     schedule:
-      interval: 'daily'
-    groups:
-      github-actions:
-        patterns:
-          - '*'
+      interval: "daily"

 {%- if cookiecutter.use_docker == 'y' %}

   # Enable version updates for Docker
-  - package-ecosystem: 'docker'
+  # We need to specify each Dockerfile in a separate entry because Dependabot doesn't
+  # support wildcards or recursively checking subdirectories. Check this issue for updates:
+  # https://github.com/dependabot/dependabot-core/issues/2178
+  - package-ecosystem: "docker"
     # Look for a `Dockerfile` in the `compose/local/django` directory
-    directories:
-      - 'compose/local/django/'
-      - 'compose/local/docs/'
-      - 'compose/production/django/'
-    # Every weekday
+    directory: "compose/local/django/"
+    # Check for updates to GitHub Actions every weekday
     schedule:
-      interval: 'daily'
-    # Ignore minor version updates (3.10 -> 3.11) but update patch versions
-    ignore:
-      - dependency-name: '*'
-        update-types:
-          - 'version-update:semver-major'
-          - 'version-update:semver-minor'
-    groups:
-      docker-python:
-        patterns:
-          - '*'
+      interval: "daily"

   # Enable version updates for Docker
-  - package-ecosystem: 'docker'
-    # Look for a `Dockerfile` in the listed directories
-    directories:
-      - 'compose/local/node/'
-      - 'compose/production/aws/'
-      - 'compose/production/postgres/'
-      - 'compose/production/traefik/'
-{%- if cookiecutter.cloud_provider == 'None' %}
-      - 'compose/production/nginx/'
-{%- endif %}
-    # Every weekday
+  - package-ecosystem: "docker"
+    # Look for a `Dockerfile` in the `compose/local/docs` directory
+    directory: "compose/local/docs/"
+    # Check for updates to GitHub Actions every weekday
     schedule:
-      interval: 'daily'
+      interval: "daily"
+
+  # Enable version updates for Docker
+  - package-ecosystem: "docker"
+    # Look for a `Dockerfile` in the `compose/local/node` directory
+    directory: "compose/local/node/"
+    # Check for updates to GitHub Actions every weekday
+    schedule:
+      interval: "daily"
+
+  # Enable version updates for Docker
+  - package-ecosystem: "docker"
+    # Look for a `Dockerfile` in the `compose/production/aws` directory
+    directory: "compose/production/aws/"
+    # Check for updates to GitHub Actions every weekday
+    schedule:
+      interval: "daily"
+
+  # Enable version updates for Docker
+  - package-ecosystem: "docker"
+    # Look for a `Dockerfile` in the `compose/production/django` directory
+    directory: "compose/production/django/"
+    # Check for updates to GitHub Actions every weekday
+    schedule:
+      interval: "daily"
+
+  # Enable version updates for Docker
+  - package-ecosystem: "docker"
+    # Look for a `Dockerfile` in the `compose/production/postgres` directory
+    directory: "compose/production/postgres/"
+    # Check for updates to GitHub Actions every weekday
+    schedule:
+      interval: "daily"
+
+  # Enable version updates for Docker
+  - package-ecosystem: "docker"
+    # Look for a `Dockerfile` in the `compose/production/traefik` directory
+    directory: "compose/production/traefik/"
+    # Check for updates to GitHub Actions every weekday
+    schedule:
+      interval: "daily"

 {%- endif %}

   # Enable version updates for Python/Pip - Production
-  - package-ecosystem: 'pip'
+  - package-ecosystem: "pip"
     # Look for a `requirements.txt` in the `root` directory
-    # also 'setup.cfg', '.python-version' and 'requirements/*.txt'
-    directory: '/'
-    # Every weekday
+    # also 'setup.cfg', 'runtime.txt' and 'requirements/*.txt'
+    directory: "/"
+    # Check for updates to GitHub Actions every weekday
     schedule:
-      interval: 'daily'
-    groups:
-      python:
-        update-types:
-          - 'minor'
-          - 'patch'
+      interval: "daily"

 {%- if cookiecutter.frontend_pipeline == 'Gulp' %}

   # Enable version updates for javascript/npm
-  - package-ecosystem: 'npm'
-    # Look for a `packages.json` in the `root` directory
-    directory: '/'
-    # Every weekday
+  - package-ecosystem: "npm"
+    # Look for a `packages.json' in the `root` directory
+    directory: "/"
+    # Check for updates to GitHub Actions every weekday
     schedule:
-      interval: 'daily'
-    groups:
-      javascript:
-        update-types:
-          - 'minor'
-          - 'patch'
+      interval: "daily"

 {%- endif %}
@@ -7,12 +7,12 @@ env:

 on:
   pull_request:
-    branches: ['master', 'main']
-    paths-ignore: ['docs/**']
+    branches: [ "master", "main" ]
+    paths-ignore: [ "docs/**" ]

   push:
-    branches: ['master', 'main']
-    paths-ignore: ['docs/**']
+    branches: [ "master", "main" ]
+    paths-ignore: [ "docs/**" ]

 concurrency:
   group: {% raw %}${{ github.head_ref || github.run_id }}{% endraw %}
@@ -22,21 +22,23 @@ jobs:
   linter:
     runs-on: ubuntu-latest
     steps:

       - name: Checkout Code Repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3

       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v3
         with:
-          python-version-file: '.python-version'
+          python-version: "3.9"
+          cache: pip
+          cache-dependency-path: |
+            requirements/base.txt
+            requirements/local.txt

-{%- if cookiecutter.open_source_license != 'Not open source' %}
-      # Consider using pre-commit.ci for open source project
-{%- endif %}
       - name: Run pre-commit
-        uses: pre-commit/action@v3.0.1
+        uses: pre-commit/action@v2.0.3

-  # With no caching at all the entire ci process takes 3m to complete!
+  # With no caching at all the entire ci process takes 4m 30s to complete!
   pytest:
     runs-on: ubuntu-latest
 {%- if cookiecutter.use_docker == 'n' %}
@@ -49,7 +51,7 @@ jobs:
           - 6379:6379
 {%- endif %}
       postgres:
-        image: postgres:{{ cookiecutter.postgresql_version }}
+        image: postgres:12
         ports:
          - 5432:5432
         env:
@@ -57,60 +59,35 @@ jobs:

     env:
 {%- if cookiecutter.use_celery == 'y' %}
-      REDIS_URL: 'redis://localhost:6379/0'
+      CELERY_BROKER_URL: "redis://localhost:6379/0"
 {%- endif %}
       # postgres://user:password@host:port/database
-      DATABASE_URL: 'postgres://postgres:postgres@localhost:5432/postgres'
+      DATABASE_URL: "postgres://postgres:postgres@localhost:5432/postgres"
 {%- endif %}

     steps:

       - name: Checkout Code Repository
-        uses: actions/checkout@v4
+        uses: actions/checkout@v3
 {%- if cookiecutter.use_docker == 'y' %}

-      - name: Set up Docker Buildx
-        uses: docker/setup-buildx-action@v3
-
-      - name: Build and cache local backend
-        uses: docker/bake-action@v6
-        with:
-          push: false
-          load: true
-          files: docker-compose.local.yml
-          targets: django
-          set: |
-            django.cache-from=type=gha,scope=django-cached-tests
-            django.cache-to=type=gha,scope=django-cached-tests,mode=max
-            postgres.cache-from=type=gha,scope=postgres-cached-tests
-            postgres.cache-to=type=gha,scope=postgres-cached-tests,mode=max
-
-      - name: Build and cache docs
-        uses: docker/bake-action@v6
-        with:
-          push: false
-          load: true
-          files: docker-compose.docs.yml
-          set: |
-            docs.cache-from=type=gha,scope=cached-docs
-            docs.cache-to=type=gha,scope=cached-docs,mode=max
-
-      - name: Check DB Migrations
-        run: docker compose -f docker-compose.local.yml run --rm django python manage.py makemigrations --check
+      - name: Build the Stack
+        run: docker-compose -f local.yml build

       - name: Run DB Migrations
-        run: docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
+        run: docker-compose -f local.yml run --rm django python manage.py migrate

       - name: Run Django Tests
-        run: docker compose -f docker-compose.local.yml run django pytest
+        run: docker-compose -f local.yml run django pytest

       - name: Tear down the Stack
-        run: docker compose -f docker-compose.local.yml down
+        run: docker-compose -f local.yml down
 {%- else %}

       - name: Set up Python
-        uses: actions/setup-python@v5
+        uses: actions/setup-python@v3
         with:
-          python-version-file: '.python-version'
+          python-version: "3.9"
           cache: pip
           cache-dependency-path: |
             requirements/base.txt
@@ -121,12 +98,6 @@ jobs:
           python -m pip install --upgrade pip
           pip install -r requirements/local.txt

-      - name: Check DB Migrations
-        run: python manage.py makemigrations --check
-
-      - name: Run DB Migrations
-        run: python manage.py migrate
-
       - name: Test with pytest
         run: pytest
 {%- endif %}
22 {{cookiecutter.project_slug}}/.gitignore vendored
@@ -59,6 +59,9 @@ docs/_build/
 # PyBuilder
 target/

+# pyenv
+.python-version
+
 {% if cookiecutter.use_celery == 'y' -%}
 # celery beat schedule file
 celerybeat-schedule
@@ -158,10 +161,11 @@ typings/
 !.vscode/extensions.json
 *.code-workspace

-# Local History for devcontainer
-.devcontainer/bash_history
+# Local History for Visual Studio Code
+.history/

-{% if cookiecutter.editor == 'PyCharm' -%}
+{% if cookiecutter.use_pycharm == 'y' -%}
 # Provided default Pycharm Run/Debug Configurations should be tracked by git
 # In case of local modifications made by Pycharm, use update-index command
 # for each changed file, like this:
@@ -322,12 +326,9 @@ Session.vim
 # Auto-generated tag files
 tags

-# Redis dump file
-dump.rdb
-
 ### Project template
-{%- if cookiecutter.use_mailpit == 'y' and cookiecutter.use_docker == 'n' %}
-mailpit
+{%- if cookiecutter.use_mailhog == 'y' and cookiecutter.use_docker == 'n' %}
+MailHog
 {%- endif %}
 {{ cookiecutter.project_slug }}/media/

@@ -342,9 +343,4 @@ project.css
 project.min.css
 vendors.js
 *.min.js
-*.min.js.map
-{%- endif %}
-{%- if cookiecutter.frontend_pipeline == 'Webpack' %}
-{{ cookiecutter.project_slug }}/static/webpack_bundles/
-webpack-stats.json
 {%- endif %}
@@ -7,45 +7,46 @@ variables:
   POSTGRES_PASSWORD: ''
   POSTGRES_DB: 'test_{{ cookiecutter.project_slug }}'
   POSTGRES_HOST_AUTH_METHOD: trust
-{%- if cookiecutter.use_celery == 'y' %}
-  REDIS_URL: 'redis://redis:6379/0'
+{% if cookiecutter.use_celery == 'y' -%}
+  CELERY_BROKER_URL: 'redis://redis:6379/0'
 {%- endif %}

-precommit:
+flake8:
   stage: lint
-  image: python:3.12
-  variables:
-    PRE_COMMIT_HOME: ${CI_PROJECT_DIR}/.cache/pre-commit
-  cache:
-    paths:
-      - ${PRE_COMMIT_HOME}
+  image: python:3.9-alpine
   before_script:
-    - pip install -q pre-commit
+    - pip install -q flake8
   script:
-    - pre-commit run --show-diff-on-failure --color=always --all-files
+    - flake8

 pytest:
   stage: test
-{%- if cookiecutter.use_docker == 'y' %}
-  image: docker:25.0
+{% if cookiecutter.use_docker == 'y' -%}
+  image: docker/compose:1.29.2
+  tags:
+    - docker
   services:
     - docker:dind
   before_script:
-    - docker compose -f docker-compose.local.yml build
-    - docker compose -f docker-compose.docs.yml build
+    - docker-compose -f local.yml build
     # Ensure celerybeat does not crash due to non-existent tables
-    - docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
-    - docker compose -f docker-compose.local.yml up -d
+    - docker-compose -f local.yml run --rm django python manage.py migrate
+    - docker-compose -f local.yml up -d
   script:
-    - docker compose -f docker-compose.local.yml run django pytest
-{%- else %}
-  image: python:3.12
+    - docker-compose -f local.yml run django pytest
+{%- else -%}
+  image: python:3.9
+  tags:
+    - python
   services:
     - postgres:{{ cookiecutter.postgresql_version }}
   variables:
     DATABASE_URL: pgsql://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres/$POSTGRES_DB

   before_script:
     - pip install -r requirements/local.txt

   script:
     - pytest
 {%- endif %}
@@ -10,12 +10,12 @@
             <option value="celeryworker"/>
             <option value="celerybeat"/>
 {%- endif %}
-{%- if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] %}
+{%- if cookiecutter.frontend_pipeline == 'Gulp' %}
             <option value="node"/>
 {%- endif %}
           </list>
         </option>
-        <option name="sourceFilePath" value="docker-compose.local.yml"/>
+        <option name="sourceFilePath" value="local.yml"/>
       </settings>
     </deployment>
     <method v="2"/>
@@ -8,7 +8,7 @@
             <option value="docs"/>
           </list>
         </option>
-        <option name="sourceFilePath" value="docker-compose.local.yml"/>
+        <option name="sourceFilePath" value="local.yml"/>
       </settings>
     </deployment>
     <method v="2"/>
@@ -13,7 +13,7 @@
     </facet>
   </component>
   <component name="NewModuleRootManager">
-    {% if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] %}
+    {% if cookiecutter.frontend_pipeline == 'Gulp' %}
     <content url="file://$MODULE_DIR$">
       <excludeFolder url="file://$MODULE_DIR$/node_modules" />
     </content>
@@ -1,54 +1,30 @@
-exclude: '^docs/|/migrations/|devcontainer.json'
-default_stages: [pre-commit]
-minimum_pre_commit_version: "3.2.0"
-
-default_language_version:
-  python: python3.12
+exclude: "^docs/|/migrations/"
+default_stages: [commit]

 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v5.0.0
+    rev: v4.1.0
     hooks:
       - id: trailing-whitespace
       - id: end-of-file-fixer
-      - id: check-json
-      - id: check-toml
-      - id: check-xml
       - id: check-yaml
-      - id: debug-statements
-      - id: check-builtin-literals
-      - id: check-case-conflict
-      - id: check-docstring-first
-      - id: detect-private-key

-  - repo: https://github.com/pre-commit/mirrors-prettier
-    rev: v4.0.0-alpha.8
+  - repo: https://github.com/psf/black
+    rev: 22.3.0
     hooks:
-      - id: prettier
-        args: ['--tab-width', '2', '--single-quote']
-        exclude: '{{cookiecutter.project_slug}}/templates/'
+      - id: black

-  - repo: https://github.com/adamchainz/django-upgrade
-    rev: '1.25.0'
+  - repo: https://github.com/PyCQA/isort
+    rev: 5.10.1
     hooks:
-      - id: django-upgrade
-        args: ['--target-version', '5.0']
+      - id: isort

-  # Run the Ruff linter.
-  - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.12.5
+  - repo: https://github.com/PyCQA/flake8
+    rev: 4.0.1
     hooks:
-      # Linter
-      - id: ruff-check
-        args: [--fix, --exit-non-zero-on-fix]
-      # Formatter
-      - id: ruff-format
-
-  - repo: https://github.com/Riverside-Healthcare/djLint
-    rev: v1.36.4
-    hooks:
-      - id: djlint-reformat-django
-      - id: djlint-django
+      - id: flake8
+        args: ["--config=setup.cfg"]
+        additional_dependencies: [flake8-isort]

 # sets up .pre-commit-ci.yaml to ensure pre-commit dependencies stay up to date
 ci:
14 {{cookiecutter.project_slug}}/.pylintrc Normal file
@@ -0,0 +1,14 @@
+[MASTER]
+load-plugins=pylint_django{% if cookiecutter.use_celery == "y" %}, pylint_celery{% endif %}
+django-settings-module=config.settings.base
+[FORMAT]
+max-line-length=120
+
+[MESSAGES CONTROL]
+disable=missing-docstring,invalid-name
+
+[DESIGN]
+max-parents=13
+
+[TYPECHECK]
+generated-members=REQUEST,acl_users,aq_parent,"[a-zA-Z]+_set{1,2}",save,delete
@@ -1 +0,0 @@
-3.12
@@ -1,20 +1,12 @@
-# Read the Docs configuration file
-# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
-
-# Required
 version: 2

-# Set the version of Python and other tools you might need
-build:
-  os: ubuntu-22.04
-  tools:
-    python: '3.12'
-
-# Build documentation in the docs/ directory with Sphinx
 sphinx:
   configuration: docs/conf.py

-# Python requirements required to build your docs
+build:
+  image: testing
+
 python:
+  version: 3.9
   install:
     - requirements: requirements/local.txt
@@ -2,7 +2,7 @@ dist: focal

 language: python
 python:
-  - "3.12"
+  - "3.9"

 services:
   - {% if cookiecutter.use_docker == 'y' %}docker{% else %}postgresql{% endif %}
@@ -10,24 +10,23 @@ jobs:
   include:
     - name: "Linter"
       before_script:
-        - pip install -q ruff
+        - pip install -q flake8
       script:
-        - ruff check .
+        - "flake8"

     - name: "Django Test"
 {%- if cookiecutter.use_docker == 'y' %}
       before_script:
-        - docker compose -v
+        - docker-compose -v
         - docker -v
-        - docker compose -f docker-compose.local.yml build
-        - docker compose -f docker-compose.docs.yml build
+        - docker-compose -f local.yml build
         # Ensure celerybeat does not crash due to non-existent tables
-        - docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
-        - docker compose -f docker-compose.local.yml up -d
+        - docker-compose -f local.yml run --rm django python manage.py migrate
+        - docker-compose -f local.yml up -d
       script:
-        - docker compose -f docker-compose.local.yml run django pytest
+        - "docker-compose -f local.yml run django pytest"
       after_failure:
-        - docker compose -f docker-compose.local.yml logs
+        - docker-compose -f local.yml logs
 {%- else %}
       before_install:
         - sudo apt-get update -qq
@@ -38,9 +37,9 @@ jobs:
         - sudo apt-get install -qq libsqlite3-dev libxml2 libxml2-dev libssl-dev libbz2-dev wget curl llvm
       language: python
       python:
-        - "3.12"
+        - "3.9"
       install:
         - pip install -r requirements/local.txt
       script:
-        - pytest
+        - "pytest"
 {%- endif %}
@@ -1,6 +1,6 @@
 release: python manage.py migrate
 {%- if cookiecutter.use_async == "y" %}
-web: gunicorn config.asgi:application -k uvicorn_worker.UvicornWorker
+web: gunicorn config.asgi:application -k uvicorn.workers.UvicornWorker
 {%- else %}
 web: gunicorn config.wsgi:application
 {%- endif %}
@@ -3,7 +3,7 @@
 {{ cookiecutter.description }}

 [](https://github.com/cookiecutter/cookiecutter-django/)
-[](https://github.com/astral-sh/ruff)
+[](https://github.com/ambv/black)

 {%- if cookiecutter.open_source_license != "Not open source" %}

@@ -12,17 +12,17 @@ License: {{cookiecutter.open_source_license}}

 ## Settings

-Moved to [settings](https://cookiecutter-django.readthedocs.io/en/latest/1-getting-started/settings.html).
+Moved to [settings](http://cookiecutter-django.readthedocs.io/en/latest/settings.html).

 ## Basic Commands

 ### Setting Up Your Users

 - To create a **normal user account**, just go to Sign Up and fill out the form. Once you submit it, you'll see a "Verify Your E-mail Address" page. Go to your console to see a simulated email verification message. Copy the link into your browser. Now the user's email should be verified and ready to go.

 - To create a **superuser account**, use this command:

       $ python manage.py createsuperuser

 For convenience, you can keep your normal user logged in on Chrome and your superuser logged in on Firefox (or similar), so that you can see how the site behaves for both kinds of users.

@@ -46,7 +46,7 @@ To run the tests, check your test coverage, and generate an HTML coverage report

 ### Live reloading and Sass CSS compilation

-Moved to [Live reloading and SASS compilation](https://cookiecutter-django.readthedocs.io/en/latest/2-local-development/developing-locally.html#using-webpack-or-gulp).
+Moved to [Live reloading and SASS compilation](https://cookiecutter-django.readthedocs.io/en/latest/developing-locally.html#sass-compilation-live-reloading).

 {%- if cookiecutter.use_celery == "y" %}

@@ -56,57 +56,45 @@ This app comes with Celery.

 To run a celery worker:

-```bash
+``` bash
 cd {{cookiecutter.project_slug}}
 celery -A config.celery_app worker -l info
 ```

-Please note: For Celery's import magic to work, it is important _where_ the celery commands are run. If you are in the same folder with _manage.py_, you should be right.
+Please note: For Celery's import magic to work, it is important *where* the celery commands are run. If you are in the same folder with *manage.py*, you should be right.

-To run [periodic tasks](https://docs.celeryq.dev/en/stable/userguide/periodic-tasks.html), you'll need to start the celery beat scheduler service. You can start it as a standalone process:
-
-```bash
-cd {{cookiecutter.project_slug}}
-celery -A config.celery_app beat
-```
-
-or you can embed the beat service inside a worker with the `-B` option (not recommended for production use):
-
-```bash
-cd {{cookiecutter.project_slug}}
-celery -A config.celery_app worker -B -l info
-```
-
 {%- endif %}
-{%- if cookiecutter.use_mailpit == "y" %}
+{%- if cookiecutter.use_mailhog == "y" %}

 ### Email Server

 {%- if cookiecutter.use_docker == "y" %}

-In development, it is often nice to be able to see emails that are being sent from your application. For that reason local SMTP server [Mailpit](https://github.com/axllent/mailpit) with a web interface is available as docker container.
+In development, it is often nice to be able to see emails that are being sent from your application. For that reason local SMTP server [MailHog](https://github.com/mailhog/MailHog) with a web interface is available as docker container.

-Container mailpit will start automatically when you will run all docker containers.
-Please check [cookiecutter-django Docker documentation](https://cookiecutter-django.readthedocs.io/en/latest/2-local-development/developing-locally-docker.html) for more details how to start all containers.
+Container mailhog will start automatically when you will run all docker containers.
+Please check [cookiecutter-django Docker documentation](http://cookiecutter-django.readthedocs.io/en/latest/deployment-with-docker.html) for more details how to start all containers.

-With Mailpit running, to view messages that are sent by your application, open your browser and go to `http://127.0.0.1:8025`
+With MailHog running, to view messages that are sent by your application, open your browser and go to `http://127.0.0.1:8025`
 {%- else %}

-In development, it is often nice to be able to see emails that are being sent from your application. If you choose to use [Mailpit](https://github.com/axllent/mailpit) when generating the project a local SMTP server with a web interface will be available.
+In development, it is often nice to be able to see emails that are being sent from your application. If you choose to use [MailHog](https://github.com/mailhog/MailHog) when generating the project a local SMTP server with a web interface will be available.

-1. [Download the latest Mailpit release](https://github.com/axllent/mailpit/releases) for your OS.
-
-2. Copy the binary file to the project root.
-
-3. Make it executable:
-
-       $ chmod +x mailpit
-
-4. Spin up another terminal window and start it there:
-
-       ./mailpit
-
-5. Check out <http://127.0.0.1:8025/> to see how it goes.
+1. [Download the latest MailHog release](https://github.com/mailhog/MailHog/releases) for your OS.
+
+2. Rename the build to `MailHog`.
+
+3. Copy the file to the project root.
+
+4. Make it executable:
+
+       $ chmod +x MailHog
+
+5. Spin up another terminal window and start it there:
+
+       ./MailHog
+
+6. Check out <http://127.0.0.1:8025/> to see how it goes.

 Now you have your own mail server running locally, ready to receive whatever you send it.

@@ -130,24 +118,23 @@ The following details how to deploy this application.

 ### Heroku

-See detailed [cookiecutter-django Heroku documentation](https://cookiecutter-django.readthedocs.io/en/latest/3-deployment/deployment-on-heroku.html).
+See detailed [cookiecutter-django Heroku documentation](http://cookiecutter-django.readthedocs.io/en/latest/deployment-on-heroku.html).

 {%- endif %}
 {%- if cookiecutter.use_docker.lower() == "y" %}

 ### Docker

-See detailed [cookiecutter-django Docker documentation](https://cookiecutter-django.readthedocs.io/en/latest/3-deployment/deployment-with-docker.html).
+See detailed [cookiecutter-django Docker documentation](http://cookiecutter-django.readthedocs.io/en/latest/deployment-with-docker.html).

 {%- endif %}
-{%- if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] %}
+{%- if cookiecutter.frontend_pipeline == 'Gulp' %}

 ### Custom Bootstrap Compilation

 The generated CSS is set up with automatic Bootstrap recompilation with variables of your choice.
 Bootstrap v5 is installed using npm and customised by tweaking your variables in `static/sass/custom_bootstrap_vars`.

-You can find a list of available variables [in the bootstrap source](https://github.com/twbs/bootstrap/blob/v5.1.3/scss/_variables.scss), or get explanations on them in the [Bootstrap docs](https://getbootstrap.com/docs/5.1/customize/sass/).
+You can find a list of available variables [in the bootstrap source](https://github.com/twbs/bootstrap/blob/main/scss/_variables.scss), or get explanations on them in the [Bootstrap docs](https://getbootstrap.com/docs/5.1/customize/sass/).

-Bootstrap's javascript as well as its dependencies are concatenated into a single file: `static/js/vendors.js`.
+Bootstrap's javascript as well as its dependencies is concatenated into a single file: `static/js/vendors.js`.
 {%- endif %}
@@ -1,5 +1,4 @@
 #!/usr/bin/env bash
-{%- if cookiecutter.frontend_pipeline == "Django Compressor" %}

 compress_enabled() {
 python << END
@@ -20,7 +19,4 @@ if compress_enabled
 then
     python manage.py compress
 fi
-{%- endif %}

 python manage.py collectstatic --noinput
-python manage.py compilemessages -i site-packages
@@ -1,8 +1,10 @@
-# define an alias for the specific python version used in this file.
-FROM docker.io/python:3.12.11-slim-bookworm AS python
+ARG PYTHON_VERSION=3.9-slim-bullseye
+
+# define an alias for the specfic python version used in this file.
+FROM python:${PYTHON_VERSION} as python

 # Python build stage
-FROM python AS python-build-stage
+FROM python as python-build-stage

 ARG BUILD_ENVIRONMENT=local

@@ -10,46 +12,33 @@ ARG BUILD_ENVIRONMENT=local
 RUN apt-get update && apt-get install --no-install-recommends -y \
   # dependencies for building Python packages
   build-essential \
-  # psycopg dependencies
+  # psycopg2 dependencies
   libpq-dev

 # Requirements are installed here to ensure they will be cached.
 COPY ./requirements .

 # Create Python Dependency and Sub-Dependency Wheels.
 RUN pip wheel --wheel-dir /usr/src/app/wheels \
   -r ${BUILD_ENVIRONMENT}.txt


 # Python 'run' stage
-FROM python AS python-run-stage
+FROM python as python-run-stage

 ARG BUILD_ENVIRONMENT=local
 ARG APP_HOME=/app

-ENV PYTHONUNBUFFERED=1
-ENV PYTHONDONTWRITEBYTECODE=1
-ENV BUILD_ENV=${BUILD_ENVIRONMENT}
+ENV PYTHONUNBUFFERED 1
+ENV PYTHONDONTWRITEBYTECODE 1
+ENV BUILD_ENV ${BUILD_ENVIRONMENT}

 WORKDIR ${APP_HOME}

-{% if cookiecutter.use_docker == "y" %}
-
-# devcontainer dependencies and utils
-RUN apt-get update && apt-get install --no-install-recommends -y \
-  sudo git bash-completion nano ssh
-
-# Create devcontainer user and add it to sudoers
-RUN groupadd --gid 1000 dev-user \
-  && useradd --uid 1000 --gid dev-user --shell /bin/bash --create-home dev-user \
-  && echo dev-user ALL=\(root\) NOPASSWD:ALL > /etc/sudoers.d/dev-user \
-  && chmod 0440 /etc/sudoers.d/dev-user
-{% endif %}

 # Install required system dependencies
 RUN apt-get update && apt-get install --no-install-recommends -y \
-  # psycopg dependencies
+  # psycopg2 dependencies
   libpq-dev \
-  wait-for-it \
   # Translations dependencies
   gettext \
   # cleaning up unused files
@@ -58,11 +47,11 @@ RUN apt-get update && apt-get install --no-install-recommends -y \

 # All absolute dir copies ignore workdir instruction. All relative dir copies are wrt to the workdir instruction
 # copy python dependency wheels from python-build-stage
 COPY --from=python-build-stage /usr/src/app/wheels /wheels/

 # use wheels to install python dependencies
 RUN pip install --no-cache-dir --no-index --find-links=/wheels/ /wheels/* \
   && rm -rf /wheels/

 COPY ./compose/production/django/entrypoint /entrypoint
 RUN sed -i 's/\r$//g' /entrypoint
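Since `BUILD_ENVIRONMENT` selects which requirements file gets baked into the wheel stage, the same Dockerfile can be built against a different requirements set at build time. A hypothetical invocation (the image tag is illustrative):

```bash
# Override the default ARG value declared in the Dockerfile above.
docker build --build-arg BUILD_ENVIRONMENT=production -t my_project_django .
```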
@@ -5,4 +5,4 @@ set -o nounset


 rm -f './celerybeat.pid'
-exec watchfiles --filter python celery.__main__.main --args '-A config.celery_app beat -l INFO'
+celery -A config.celery_app beat -l INFO
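In this comparison the `-` lines are master and the `+` lines are the 2022.04.05 tag, so the `exec` on the master side is an addition, and it is not cosmetic: it replaces the shell with the long-running process, so container signals (e.g. SIGTERM from `docker stop`) reach that process directly instead of being held by an intermediate shell. A quick illustration of the difference:

```bash
# Without exec: bash stays as the container's main process and sleep is
# a child; SIGTERM hits bash, which may not forward it.
bash -c 'sleep 100'

# With exec: sleep replaces bash and receives SIGTERM itself.
bash -c 'exec sleep 100'
```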
@@ -4,13 +4,8 @@ set -o errexit
 set -o nounset


-until timeout 10 celery -A config.celery_app inspect ping; do
-    >&2 echo "Celery workers not available"
-done
-
-echo 'Starting flower'
-
-exec watchfiles --filter python celery.__main__.main \
-    --args \
-    "-A config.celery_app -b \"${REDIS_URL}\" flower --basic_auth=\"${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}\""
+celery \
+    -A config.celery_app \
+    -b "${CELERY_BROKER_URL}" \
+    flower \
+    --basic_auth="${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}"
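Master also waits for the workers to answer a ping before starting Flower, rather than launching it blind. The probe-until-ready loop generalises to any dependency check; a sketch under that reading (the function name is illustrative):

```bash
# Retry a health probe (capped at 10s per attempt) until it exits 0,
# logging to stderr in the meantime.
wait_until_ready() {
    until timeout 10 "$@"; do
        >&2 echo "still waiting for: $*"
    done
}

wait_until_ready celery -A config.celery_app inspect ping
```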
@@ -4,4 +4,4 @@ set -o errexit
 set -o nounset


-exec watchfiles --filter python celery.__main__.main --args '-A config.celery_app worker -l INFO'
+watchgod celery.__main__.main --args -A config.celery_app worker -l INFO
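watchgod's CLI was superseded by watchfiles (same author), which explains the swap. Assuming the watchfiles CLI behaves as documented, the dotted path is imported and called with `sys.argv` set from `--args`, and `--filter python` restarts it only when Python files change; the master line is thus a reload-on-change wrapper around the plain worker command:

```bash
# Auto-reloading wrapper (the master side of the hunk above):
exec watchfiles --filter python celery.__main__.main --args '-A config.celery_app worker -l INFO'

# ...which, per invocation, amounts to running:
celery -A config.celery_app worker -l INFO
```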
@@ -7,7 +7,7 @@ set -o nounset

 python manage.py migrate
 {%- if cookiecutter.use_async == 'y' %}
-exec uvicorn config.asgi:application --host 0.0.0.0 --reload --reload-include '*.html'
+uvicorn config.asgi:application --host 0.0.0.0 --reload
 {%- else %}
-exec python manage.py runserver_plus 0.0.0.0:8000
+python manage.py runserver_plus 0.0.0.0:8000
 {%- endif %}
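uvicorn's `--reload` watches Python sources by default; `--reload-include` extends the watch list with extra glob patterns (this needs the watchfiles-based reloader installed), which is why master also restarts on template edits. An illustrative extension that reloads on stylesheet changes too:

```bash
uvicorn config.asgi:application --host 0.0.0.0 --reload \
    --reload-include '*.html' --reload-include '*.css'
```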
@@ -1,16 +1,18 @@
-# define an alias for the specific python version used in this file.
-FROM docker.io/python:3.12.11-slim-bookworm AS python
+ARG PYTHON_VERSION=3.9-slim-bullseye
+
+# define an alias for the specfic python version used in this file.
+FROM python:${PYTHON_VERSION} as python


 # Python build stage
-FROM python AS python-build-stage
+FROM python as python-build-stage

-ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONDONTWRITEBYTECODE 1

 RUN apt-get update && apt-get install --no-install-recommends -y \
   # dependencies for building Python packages
   build-essential \
-  # psycopg dependencies
+  # psycopg2 dependencies
   libpq-dev \
   # cleaning up unused files
   && apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
@@ -20,22 +22,22 @@ RUN apt-get update && apt-get install --no-install-recommends -y \
 COPY ./requirements /requirements

 # create python dependency wheels
-RUN pip wheel --no-cache-dir --wheel-dir /usr/src/app/wheels \
+RUN pip wheel --no-cache-dir --no-deps --wheel-dir /usr/src/app/wheels \
   -r /requirements/local.txt -r /requirements/production.txt \
   && rm -rf /requirements


 # Python 'run' stage
-FROM python AS python-run-stage
+FROM python as python-run-stage

 ARG BUILD_ENVIRONMENT
-ENV PYTHONUNBUFFERED=1
-ENV PYTHONDONTWRITEBYTECODE=1
+ENV PYTHONUNBUFFERED 1
+ENV PYTHONDONTWRITEBYTECODE 1

 RUN apt-get update && apt-get install --no-install-recommends -y \
   # To run the Makefile
   make \
-  # psycopg dependencies
+  # psycopg2 dependencies
   libpq-dev \
   # Translations dependencies
   gettext \
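The `--no-deps` flag dropped on the master side changes what `pip wheel` produces: without it, pip also builds wheels for every transitive dependency rather than only the listed requirements, so a later offline install from the wheel directory can resolve everything. Illustrated with a hypothetical package:

```bash
# Only builds a wheel for the named requirement:
pip wheel --wheel-dir /tmp/wheels --no-deps requests

# Also builds wheels for its sub-dependencies (urllib3, idna, ...):
pip wheel --wheel-dir /tmp/wheels requests
```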
@@ -4,4 +4,4 @@ set -o errexit
 set -o pipefail
 set -o nounset

-exec make livehtml
+make livehtml
Some files were not shown because too many files have changed in this diff.