mirror of
https://github.com/cookiecutter/cookiecutter-django.git
synced 2025-08-15 09:24:52 +03:00
Merge branch 'master' into patch-5
# Conflicts: # README.rst
This commit is contained in:
commit
33e6ae972f
|
@ -12,7 +12,7 @@ trim_trailing_whitespace = true
|
||||||
indent_style = space
|
indent_style = space
|
||||||
indent_size = 4
|
indent_size = 4
|
||||||
|
|
||||||
[*.{html,css,scss,json,yml}]
|
[*.{html,css,scss,json,yml,xml}]
|
||||||
indent_style = space
|
indent_style = space
|
||||||
indent_size = 2
|
indent_size = 2
|
||||||
|
|
||||||
|
|
56
.github/CONTRIBUTORS-template.md
vendored
Normal file
56
.github/CONTRIBUTORS-template.md
vendored
Normal file
|
@ -0,0 +1,56 @@
|
||||||
|
# Contributors
|
||||||
|
|
||||||
|
## Core Developers
|
||||||
|
|
||||||
|
These contributors have commit flags for the repository, and are able to
|
||||||
|
accept and merge pull requests.
|
||||||
|
|
||||||
|
<table>
|
||||||
|
<tr>
|
||||||
|
<th>Name</th>
|
||||||
|
<th>Github</th>
|
||||||
|
<th>Twitter</th>
|
||||||
|
</tr>
|
||||||
|
{%- for contributor in core_contributors %}
|
||||||
|
<tr>
|
||||||
|
<td>{{ contributor.name }}</td>
|
||||||
|
<td>
|
||||||
|
<a href="https://github.com/{{ contributor.github_login }}">{{ contributor.github_login }}</a>
|
||||||
|
</td>
|
||||||
|
<td>{{ contributor.twitter_username }}</td>
|
||||||
|
</tr>
|
||||||
|
{%- endfor %}
|
||||||
|
</table>
|
||||||
|
|
||||||
|
*Audrey is also the creator of Cookiecutter. Audrey and Daniel are on
|
||||||
|
the Cookiecutter core team.*
|
||||||
|
|
||||||
|
## Other Contributors
|
||||||
|
|
||||||
|
Listed in alphabetical order.
|
||||||
|
|
||||||
|
<table>
|
||||||
|
<tr>
|
||||||
|
<th>Name</th>
|
||||||
|
<th>Github</th>
|
||||||
|
<th>Twitter</th>
|
||||||
|
</tr>
|
||||||
|
{%- for contributor in other_contributors %}
|
||||||
|
<tr>
|
||||||
|
<td>{{ contributor.name }}</td>
|
||||||
|
<td>
|
||||||
|
<a href="https://github.com/{{ contributor.github_login }}">{{ contributor.github_login }}</a>
|
||||||
|
</td>
|
||||||
|
<td>{{ contributor.twitter_username }}</td>
|
||||||
|
</tr>
|
||||||
|
{%- endfor %}
|
||||||
|
</table>
|
||||||
|
|
||||||
|
### Special Thanks
|
||||||
|
|
||||||
|
The following haven't provided code directly, but have provided
|
||||||
|
guidance and advice.
|
||||||
|
|
||||||
|
- Jannis Leidel
|
||||||
|
- Nate Aune
|
||||||
|
- Barry Morrison
|
6
.github/FUNDING.yml
vendored
6
.github/FUNDING.yml
vendored
|
@ -1,7 +1,7 @@
|
||||||
# These are supported funding model platforms
|
# These are supported funding model platforms
|
||||||
|
|
||||||
github: pydanny
|
github: [pydanny, browniebroke]
|
||||||
patreon: roygreenfeld
|
patreon: feldroy
|
||||||
open_collective: # Replace with a single Open Collective username
|
open_collective: # Replace with a single Open Collective username
|
||||||
ko_fi: # Replace with a single Ko-fi username
|
ko_fi: # Replace with a single Ko-fi username
|
||||||
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel
|
||||||
|
@ -9,4 +9,4 @@ community_bridge: # Replace with a single Community Bridge project-name e.g., cl
|
||||||
liberapay: # Replace with a single Liberapay username
|
liberapay: # Replace with a single Liberapay username
|
||||||
issuehunt: # Replace with a single IssueHunt username
|
issuehunt: # Replace with a single IssueHunt username
|
||||||
otechie: # Replace with a single Otechie username
|
otechie: # Replace with a single Otechie username
|
||||||
custom: ['https://www.patreon.com/browniebroke']
|
custom: ["https://www.patreon.com/browniebroke"]
|
||||||
|
|
1
.github/ISSUE_TEMPLATE.md
vendored
1
.github/ISSUE_TEMPLATE.md
vendored
|
@ -1 +0,0 @@
|
||||||
## [Make sure to follow one of the issue templates we've got](https://github.com/pydanny/cookiecutter-django/issues/new/choose), otherwise the issue might be closed immeditely
|
|
53
.github/ISSUE_TEMPLATE/bug.md
vendored
53
.github/ISSUE_TEMPLATE/bug.md
vendored
|
@ -1,21 +1,58 @@
|
||||||
---
|
---
|
||||||
name: Bug Report
|
name: Bug Report
|
||||||
about: Report a bug
|
about: Report a bug
|
||||||
|
labels: bug
|
||||||
---
|
---
|
||||||
|
|
||||||
## What happened?
|
## What happened?
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## What should've happened instead?
|
## What should've happened instead?
|
||||||
|
|
||||||
|
## Additional details
|
||||||
|
|
||||||
|
<!-- To assist you best, please include commands that you've run, options you've selected and any relevant logs -->
|
||||||
|
|
||||||
|
* Host system configuration:
|
||||||
|
* Version of cookiecutter CLI (get it with `cookiecutter --version`):
|
||||||
|
* OS name and version:
|
||||||
|
|
||||||
|
On Linux, run
|
||||||
|
```bash
|
||||||
|
lsb_release -a 2> /dev/null || cat /etc/redhat-release 2> /dev/null || cat /etc/*-release 2> /dev/null || cat /etc/issue 2> /dev/null
|
||||||
|
```
|
||||||
|
|
||||||
|
On MacOs, run
|
||||||
|
```bash
|
||||||
|
sw_vers
|
||||||
|
```
|
||||||
|
|
||||||
|
On Windows, via CMD, run
|
||||||
|
```
|
||||||
|
systeminfo | findstr /B /C:"OS Name" /C:"OS Version"
|
||||||
|
```
|
||||||
|
|
||||||
|
|
||||||
|
```bash
|
||||||
|
# Insert here the OS name and version
|
||||||
|
|
||||||
## Steps to reproduce
|
```
|
||||||
|
|
||||||
[//]: # (Any or all of the following:)
|
* Python version, run `python3 -V`:
|
||||||
[//]: # (* Host system configuration: OS, Docker & friends' versions etc.)
|
* Docker version (if using Docker), run `docker --version`:
|
||||||
[//]: # (* Replay file https://cookiecutter.readthedocs.io/en/latest/advanced/replay.html)
|
* docker-compose version (if using Docker), run `docker-compose --version`:
|
||||||
[//]: # (* Logs)
|
* ...
|
||||||
|
* Options selected and/or [replay file](https://cookiecutter.readthedocs.io/en/latest/advanced/replay.html):
|
||||||
|
On Linux and MacOS: `cat ${HOME}/.cookiecutter_replay/cookiecutter-django.json`
|
||||||
|
(Please, take care to remove sensitive information)
|
||||||
|
```json
|
||||||
|
# Insert here the replay file content
|
||||||
|
```
|
||||||
|
<summary>
|
||||||
|
Logs:
|
||||||
|
<details>
|
||||||
|
<pre>
|
||||||
|
$ cookiecutter https://github.com/cookiecutter/cookiecutter-django
|
||||||
|
project_name [Project Name]: ...
|
||||||
|
</pre>
|
||||||
|
</details>
|
||||||
|
</summary>
|
||||||
|
|
17
.github/ISSUE_TEMPLATE/feature.md
vendored
17
.github/ISSUE_TEMPLATE/feature.md
vendored
|
@ -1,24 +1,13 @@
|
||||||
---
|
---
|
||||||
name: New Feature Proposal
|
name: New Feature Proposal
|
||||||
about: Propose a new feature
|
about: Propose a new feature
|
||||||
|
labels: enhancement
|
||||||
---
|
---
|
||||||
|
|
||||||
## Description
|
## Description
|
||||||
|
|
||||||
[//]: # (What's it you're proposing? How should it be implemented?)
|
What are you proposing? How should it be implemented?
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Rationale
|
## Rationale
|
||||||
|
|
||||||
[//]: # (Why should this feature be implemented?)
|
Why should this feature be implemented?
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Use case(s) / visualization(s)
|
|
||||||
|
|
||||||
[//]: # ("Better to see something once than to hear about it a thousand times.")
|
|
||||||
|
|
||||||
|
|
||||||
|
|
24
.github/ISSUE_TEMPLATE/improvement.md
vendored
24
.github/ISSUE_TEMPLATE/improvement.md
vendored
|
@ -1,24 +0,0 @@
|
||||||
---
|
|
||||||
name: Improvement Suggestion
|
|
||||||
about: Let us know how we could improve
|
|
||||||
---
|
|
||||||
|
|
||||||
## Description
|
|
||||||
|
|
||||||
[//]: # (What's it you're proposing? How should it be implemented?)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Rationale
|
|
||||||
|
|
||||||
[//]: # (Why should this feature be implemented?)
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Use case(s) / visualization(s)
|
|
||||||
|
|
||||||
[//]: # ("Better to see something once than to hear about it a thousand times.")
|
|
||||||
|
|
||||||
|
|
4
.github/ISSUE_TEMPLATE/paid-support.md
vendored
4
.github/ISSUE_TEMPLATE/paid-support.md
vendored
|
@ -3,8 +3,10 @@ name: Paid Support Request
|
||||||
about: Ask Core Team members to help you out
|
about: Ask Core Team members to help you out
|
||||||
---
|
---
|
||||||
|
|
||||||
Provided your question goes beyound [regular support](https://github.com/pydanny/cookiecutter-django/issues/new?template=question.md), and/or the task at hand is of timely/high priority nature use the below information to reach out for contributors directly.
|
Provided your question goes beyond [regular support](https://github.com/cookiecutter/cookiecutter-django/issues/new?template=question.md), and/or the task at hand is of timely/high priority nature use the below information to reach out for contributors directly.
|
||||||
|
|
||||||
* Daniel Roy Greenfeld, Project Lead ([GitHub](https://github.com/pydanny), [Patreon](https://www.patreon.com/danielroygreenfeld)): expertise in Django and AWS ELB.
|
* Daniel Roy Greenfeld, Project Lead ([GitHub](https://github.com/pydanny), [Patreon](https://www.patreon.com/danielroygreenfeld)): expertise in Django and AWS ELB.
|
||||||
|
|
||||||
* Nikita Shupeyko, Core Developer ([GitHub](https://github.com/webyneter)): expertise in Python/Django, hands-on DevOps and frontend experience.
|
* Nikita Shupeyko, Core Developer ([GitHub](https://github.com/webyneter)): expertise in Python/Django, hands-on DevOps and frontend experience.
|
||||||
|
|
||||||
|
* Bruno Alla, Core Developer ([GitHub](https://github.com/sponsors/browniebroke)).
|
||||||
|
|
9
.github/ISSUE_TEMPLATE/question.md
vendored
9
.github/ISSUE_TEMPLATE/question.md
vendored
|
@ -1,6 +1,11 @@
|
||||||
---
|
---
|
||||||
name: Question
|
name: Question
|
||||||
about: Please, ask your question on StackOverflow or Gitter
|
about: Please ask your question on StackOverflow, Discord or GitHub Discussions.
|
||||||
|
labels: question
|
||||||
---
|
---
|
||||||
|
|
||||||
First, make sure to examine [the docs](https://cookiecutter-django.readthedocs.io/en/latest/). If that doesn't help post a question on [StackOverflow](https://stackoverflow.com/questions/tagged/cookiecutter-django) tagged with `cookiecutter-django`. Finally, feel free to join [Gitter](https://gitter.im/pydanny/cookiecutter-django) and ask around.
|
First, make sure to examine [the docs](https://cookiecutter-django.readthedocs.io/en/latest/). If that doesn't help, we recommend one of these 3 main channels:
|
||||||
|
|
||||||
|
- If your issue is related to Django + something else but was generated with cookiecutter-django, the best is to post a question on [StackOverflow](https://stackoverflow.com/questions/tagged/cookiecutter-django) tagged with `cookiecutter-django`, you would get more visibility from other communities as well.
|
||||||
|
- Join us on [Discord](https://discord.gg/uFXweDQc5a) and ask around.
|
||||||
|
- Start [a discussion](https://github.com/cookiecutter/cookiecutter-django/discussions) on our project's GitHub.
|
||||||
|
|
28
.github/ISSUE_TEMPLATE/regression.md
vendored
28
.github/ISSUE_TEMPLATE/regression.md
vendored
|
@ -1,28 +0,0 @@
|
||||||
---
|
|
||||||
name: Regression Report
|
|
||||||
about: Let us know if something that'd been working has broke
|
|
||||||
---
|
|
||||||
|
|
||||||
## What happened before?
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## What happens now?
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Last stable commit / Since when?
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
## Steps to reproduce
|
|
||||||
|
|
||||||
[//]: # (Any or all of the following:)
|
|
||||||
[//]: # (* Host system configuration: OS, Docker & friends' versions etc.)
|
|
||||||
[//]: # (* Project generation options)
|
|
||||||
[//]: # (* Logs)
|
|
||||||
|
|
||||||
|
|
27
.github/PULL_REQUEST_TEMPLATE.md
vendored
27
.github/PULL_REQUEST_TEMPLATE.md
vendored
|
@ -1,27 +1,18 @@
|
||||||
[//]: # (Thank you for helping us out: your efforts mean great deal to the project and the community as a whole!)
|
<!-- Thank you for helping us out: your efforts mean a great deal to the project and the community as a whole! -->
|
||||||
|
|
||||||
[//]: # (Before you proceed:)
|
|
||||||
|
|
||||||
[//]: # (1. Make sure to add yourself to `CONTRIBUTORS.rst` through this PR provided you're contributing here for the first time)
|
|
||||||
[//]: # (2. Don't forget to update the `docs/` presuming others would benefit from a concise description of whatever that you're proposing)
|
|
||||||
|
|
||||||
|
|
||||||
## Description
|
## Description
|
||||||
|
|
||||||
[//]: # (What's it you're proposing?)
|
<!-- What's it you're proposing? -->
|
||||||
|
|
||||||
|
|
||||||
|
Checklist:
|
||||||
|
|
||||||
|
- [ ] I've made sure that tests are updated accordingly (especially if adding or updating a template option)
|
||||||
|
- [ ] I've updated the documentation or confirm that my change doesn't require any updates
|
||||||
|
|
||||||
## Rationale
|
## Rationale
|
||||||
|
|
||||||
[//]: # (Why does the project need that?)
|
<!--
|
||||||
|
Why does this project need the change you're proposing?
|
||||||
|
If this pull request fixes an open issue, don't forget to link it with `Fix #NNNN`
|
||||||
|
-->
|
||||||
|
|
||||||
## Use case(s) / visualization(s)
|
|
||||||
|
|
||||||
[//]: # ("Better to see something once than to hear about it a thousand times.")
|
|
||||||
|
|
||||||
|
|
||||||
|
|
8
.github/changelog-template.md
vendored
Normal file
8
.github/changelog-template.md
vendored
Normal file
|
@ -0,0 +1,8 @@
|
||||||
|
{%- for change_type, pulls in grouped_pulls.items() %}
|
||||||
|
{%- if pulls %}
|
||||||
|
### {{ change_type }}
|
||||||
|
{%- for pull_request in pulls %}
|
||||||
|
- {{ pull_request.title }} ([#{{ pull_request.number }}]({{ pull_request.html_url }}))
|
||||||
|
{%- endfor -%}
|
||||||
|
{% endif -%}
|
||||||
|
{% endfor -%}
|
1366
.github/contributors.json
vendored
Normal file
1366
.github/contributors.json
vendored
Normal file
File diff suppressed because it is too large
Load Diff
20
.github/dependabot.yml
vendored
Normal file
20
.github/dependabot.yml
vendored
Normal file
|
@ -0,0 +1,20 @@
|
||||||
|
# Config for Dependabot updates. See Documentation here:
|
||||||
|
# https://docs.github.com/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates
|
||||||
|
|
||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
# Update GitHub actions in workflows
|
||||||
|
- package-ecosystem: "github-actions"
|
||||||
|
directory: "/"
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
labels:
|
||||||
|
- "update"
|
||||||
|
|
||||||
|
# Update npm packages
|
||||||
|
- package-ecosystem: "npm"
|
||||||
|
directory: "{{cookiecutter.project_slug}}/"
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
labels:
|
||||||
|
- "update"
|
29
.github/release-drafter.yml
vendored
29
.github/release-drafter.yml
vendored
|
@ -1,29 +0,0 @@
|
||||||
categories:
|
|
||||||
- title: 'Breaking Changes'
|
|
||||||
labels:
|
|
||||||
- 'breaking'
|
|
||||||
- title: 'Major Changes'
|
|
||||||
labels:
|
|
||||||
- 'major'
|
|
||||||
- title: 'Minor Changes'
|
|
||||||
labels:
|
|
||||||
- 'enhancement'
|
|
||||||
- title: 'Bugfixes'
|
|
||||||
labels:
|
|
||||||
- 'bug'
|
|
||||||
- title: 'Removals'
|
|
||||||
labels:
|
|
||||||
- 'removed'
|
|
||||||
- title: 'Documentation updates'
|
|
||||||
labels:
|
|
||||||
- 'docs'
|
|
||||||
|
|
||||||
exclude-labels:
|
|
||||||
- 'skip-changelog'
|
|
||||||
- 'update'
|
|
||||||
- 'project infrastructure'
|
|
||||||
|
|
||||||
template: |
|
|
||||||
## Changes
|
|
||||||
|
|
||||||
$CHANGES
|
|
123
.github/workflows/ci.yml
vendored
Normal file
123
.github/workflows/ci.yml
vendored
Normal file
|
@ -0,0 +1,123 @@
|
||||||
|
name: CI
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
pull_request:
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: ${{ github.head_ref || github.run_id }}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
lint:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
cache: pip
|
||||||
|
- name: Run pre-commit
|
||||||
|
uses: pre-commit/action@v3.0.0
|
||||||
|
|
||||||
|
tests:
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
os:
|
||||||
|
- ubuntu-latest
|
||||||
|
- windows-latest
|
||||||
|
- macOS-latest
|
||||||
|
|
||||||
|
name: "pytest ${{ matrix.os }}"
|
||||||
|
runs-on: ${{ matrix.os }}
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
cache: pip
|
||||||
|
- name: Install dependencies
|
||||||
|
run: pip install -r requirements.txt
|
||||||
|
- name: Run tests
|
||||||
|
run: pytest tests
|
||||||
|
|
||||||
|
docker:
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
script:
|
||||||
|
- name: Basic
|
||||||
|
args: ""
|
||||||
|
- name: Celery & DRF
|
||||||
|
args: "use_celery=y use_drf=y"
|
||||||
|
- name: Gulp
|
||||||
|
args: "frontend_pipeline=Gulp"
|
||||||
|
- name: Webpack
|
||||||
|
args: "frontend_pipeline=Webpack"
|
||||||
|
|
||||||
|
name: "Docker ${{ matrix.script.name }}"
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
env:
|
||||||
|
DOCKER_BUILDKIT: 1
|
||||||
|
COMPOSE_DOCKER_CLI_BUILD: 1
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
cache: pip
|
||||||
|
- name: Install dependencies
|
||||||
|
run: pip install -r requirements.txt
|
||||||
|
- name: Docker ${{ matrix.script.name }}
|
||||||
|
run: sh tests/test_docker.sh ${{ matrix.script.args }}
|
||||||
|
|
||||||
|
bare:
|
||||||
|
strategy:
|
||||||
|
fail-fast: false
|
||||||
|
matrix:
|
||||||
|
script:
|
||||||
|
- name: Celery
|
||||||
|
args: "use_celery=y frontend_pipeline='Django Compressor'"
|
||||||
|
- name: Gulp
|
||||||
|
args: "frontend_pipeline=Gulp"
|
||||||
|
- name: Webpack
|
||||||
|
args: "frontend_pipeline=Webpack"
|
||||||
|
|
||||||
|
name: "Bare metal ${{ matrix.script.name }}"
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
services:
|
||||||
|
redis:
|
||||||
|
image: redis:5.0
|
||||||
|
ports:
|
||||||
|
- 6379:6379
|
||||||
|
postgres:
|
||||||
|
image: postgres:12
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
env:
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
|
||||||
|
env:
|
||||||
|
CELERY_BROKER_URL: "redis://localhost:6379/0"
|
||||||
|
# postgres://user:password@host:port/database
|
||||||
|
DATABASE_URL: "postgres://postgres:postgres@localhost:5432/postgres"
|
||||||
|
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
cache: pip
|
||||||
|
cache-dependency-path: |
|
||||||
|
requirements.txt
|
||||||
|
{{cookiecutter.project_slug}}/requirements/base.txt
|
||||||
|
{{cookiecutter.project_slug}}/requirements/local.txt
|
||||||
|
- name: Install dependencies
|
||||||
|
run: pip install -r requirements.txt
|
||||||
|
- uses: actions/setup-node@v3
|
||||||
|
with:
|
||||||
|
node-version: "16"
|
||||||
|
- name: Bare Metal ${{ matrix.script.name }}
|
||||||
|
run: sh tests/test_bare.sh ${{ matrix.script.args }}
|
30
.github/workflows/django-issue-checker.yml
vendored
Normal file
30
.github/workflows/django-issue-checker.yml
vendored
Normal file
|
@ -0,0 +1,30 @@
|
||||||
|
# Creates a new issue for Major/Minor Django updates that keeps track
|
||||||
|
# of all dependencies that need to be updated/merged in order for the
|
||||||
|
# latest Django version to also be merged.
|
||||||
|
name: Django Issue Checker
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "28 5 * * *"
|
||||||
|
# Manual trigger
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
issue-checker:
|
||||||
|
# Disables this workflow from running in a repository that is not part of the indicated organization/user
|
||||||
|
if: github.repository_owner == 'cookiecutter'
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
- name: Create Django Major Issue
|
||||||
|
run: python scripts/create_django_issue.py
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
14
.github/workflows/draft-release.yml
vendored
14
.github/workflows/draft-release.yml
vendored
|
@ -1,14 +0,0 @@
|
||||||
name: Release Drafter
|
|
||||||
|
|
||||||
on:
|
|
||||||
push:
|
|
||||||
branches:
|
|
||||||
- master
|
|
||||||
|
|
||||||
jobs:
|
|
||||||
release_notes:
|
|
||||||
runs-on: ubuntu-latest
|
|
||||||
steps:
|
|
||||||
- uses: release-drafter/release-drafter@v5
|
|
||||||
env:
|
|
||||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
|
40
.github/workflows/issue-manager.yml
vendored
Normal file
40
.github/workflows/issue-manager.yml
vendored
Normal file
|
@ -0,0 +1,40 @@
|
||||||
|
# Automatically close issues or pull requests that have a label, after a custom delay, if no one replies.
|
||||||
|
# https://github.com/tiangolo/issue-manager
|
||||||
|
name: Issue Manager
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "12 0 * * *"
|
||||||
|
issue_comment:
|
||||||
|
types:
|
||||||
|
- created
|
||||||
|
issues:
|
||||||
|
types:
|
||||||
|
- labeled
|
||||||
|
pull_request_target:
|
||||||
|
types:
|
||||||
|
- labeled
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
issue-manager:
|
||||||
|
# Disables this workflow from running in a repository that is not part of the indicated organization/user
|
||||||
|
if: github.repository_owner == 'cookiecutter'
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: tiangolo/issue-manager@0.4.0
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
config: >
|
||||||
|
{
|
||||||
|
"answered": {
|
||||||
|
"message": "Assuming the question was answered, this will be automatically closed now."
|
||||||
|
},
|
||||||
|
"solved": {
|
||||||
|
"message": "Assuming the original issue was solved, it will be automatically closed now."
|
||||||
|
},
|
||||||
|
"waiting": {
|
||||||
|
"message": "Automatically closing after waiting for additional info. To re-open, please provide the additional information requested."
|
||||||
|
}
|
||||||
|
}
|
47
.github/workflows/pre-commit-autoupdate.yml
vendored
Normal file
47
.github/workflows/pre-commit-autoupdate.yml
vendored
Normal file
|
@ -0,0 +1,47 @@
|
||||||
|
# Run pre-commit autoupdate every day at midnight
|
||||||
|
# and create a pull request if any changes
|
||||||
|
|
||||||
|
name: Pre-commit auto-update
|
||||||
|
|
||||||
|
on:
|
||||||
|
schedule:
|
||||||
|
- cron: "15 2 * * *"
|
||||||
|
workflow_dispatch: # to trigger manually
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
auto-update:
|
||||||
|
# Disables this workflow from running in a repository that is not part of the indicated organization/user
|
||||||
|
if: github.repository_owner == 'cookiecutter'
|
||||||
|
permissions:
|
||||||
|
contents: write # for peter-evans/create-pull-request to create branch
|
||||||
|
pull-requests: write # for peter-evans/create-pull-request to create a PR
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
- uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
|
||||||
|
- name: Install pre-commit
|
||||||
|
run: pip install pre-commit
|
||||||
|
|
||||||
|
- name: Autoupdate template
|
||||||
|
run: pre-commit autoupdate
|
||||||
|
|
||||||
|
- name: Autoupdate generated projects
|
||||||
|
working-directory: "{{cookiecutter.project_slug}}"
|
||||||
|
run: pre-commit autoupdate
|
||||||
|
|
||||||
|
- name: Create Pull Request
|
||||||
|
uses: peter-evans/create-pull-request@v4
|
||||||
|
with:
|
||||||
|
token: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
branch: update/pre-commit-autoupdate
|
||||||
|
title: Auto-update pre-commit hooks
|
||||||
|
commit-message: Auto-update pre-commit hooks
|
||||||
|
body: Update versions of tools in pre-commit configs to latest version
|
||||||
|
labels: update
|
34
.github/workflows/update-changelog.yml
vendored
Normal file
34
.github/workflows/update-changelog.yml
vendored
Normal file
|
@ -0,0 +1,34 @@
|
||||||
|
name: Update Changelog
|
||||||
|
|
||||||
|
on:
|
||||||
|
# Every day at 2am
|
||||||
|
schedule:
|
||||||
|
- cron: "0 2 * * *"
|
||||||
|
# Manual trigger
|
||||||
|
workflow_dispatch:
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
release:
|
||||||
|
# Disables this workflow from running in a repository that is not part of the indicated organization/user
|
||||||
|
if: github.repository_owner == 'cookiecutter'
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
- name: Set git details
|
||||||
|
run: |
|
||||||
|
git config --global user.name "github-actions"
|
||||||
|
git config --global user.email "action@github.com"
|
||||||
|
- name: Update list
|
||||||
|
run: python scripts/update_changelog.py
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
39
.github/workflows/update-contributors.yml
vendored
Normal file
39
.github/workflows/update-contributors.yml
vendored
Normal file
|
@ -0,0 +1,39 @@
|
||||||
|
name: Update Contributors
|
||||||
|
|
||||||
|
on:
|
||||||
|
push:
|
||||||
|
branches:
|
||||||
|
- master
|
||||||
|
|
||||||
|
permissions:
|
||||||
|
contents: read
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
build:
|
||||||
|
# Disables this workflow from running in a repository that is not part of the indicated organization/user
|
||||||
|
if: github.repository_owner == 'cookiecutter'
|
||||||
|
permissions:
|
||||||
|
contents: write # for stefanzweifel/git-auto-commit-action to push code in repo
|
||||||
|
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
- uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v4
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
- name: Install dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements.txt
|
||||||
|
- name: Update list
|
||||||
|
run: python scripts/update_contributors.py
|
||||||
|
env:
|
||||||
|
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||||
|
|
||||||
|
- name: Commit changes
|
||||||
|
uses: stefanzweifel/git-auto-commit-action@v4.16.0
|
||||||
|
with:
|
||||||
|
commit_message: Update Contributors
|
||||||
|
file_pattern: CONTRIBUTORS.md .github/contributors.json
|
12
.gitignore
vendored
12
.gitignore
vendored
|
@ -208,18 +208,6 @@ Session.vim
|
||||||
tags
|
tags
|
||||||
|
|
||||||
|
|
||||||
### VirtualEnv template
|
|
||||||
# Virtualenv
|
|
||||||
# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/
|
|
||||||
[Bb]in
|
|
||||||
[Ii]nclude
|
|
||||||
[Ll]ib
|
|
||||||
[Ll]ib64
|
|
||||||
[Ss]cripts
|
|
||||||
pyvenv.cfg
|
|
||||||
pip-selfcheck.json
|
|
||||||
|
|
||||||
|
|
||||||
# Even though the project might be opened and edited
|
# Even though the project might be opened and edited
|
||||||
# in any of the JetBrains IDEs, it makes no sence whatsoever
|
# in any of the JetBrains IDEs, it makes no sence whatsoever
|
||||||
# to 'run' anything within it since any particular cookiecutter
|
# to 'run' anything within it since any particular cookiecutter
|
||||||
|
|
36
.pre-commit-config.yaml
Normal file
36
.pre-commit-config.yaml
Normal file
|
@ -0,0 +1,36 @@
|
||||||
|
exclude: "{{cookiecutter.project_slug}}"
|
||||||
|
default_stages: [commit]
|
||||||
|
|
||||||
|
repos:
|
||||||
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
|
rev: v4.4.0
|
||||||
|
hooks:
|
||||||
|
- id: trailing-whitespace
|
||||||
|
- id: check-yaml
|
||||||
|
|
||||||
|
- repo: https://github.com/asottile/pyupgrade
|
||||||
|
rev: v3.3.1
|
||||||
|
hooks:
|
||||||
|
- id: pyupgrade
|
||||||
|
args: [--py310-plus]
|
||||||
|
exclude: hooks/
|
||||||
|
|
||||||
|
- repo: https://github.com/psf/black
|
||||||
|
rev: 23.1.0
|
||||||
|
hooks:
|
||||||
|
- id: black
|
||||||
|
|
||||||
|
- repo: https://github.com/PyCQA/isort
|
||||||
|
rev: 5.12.0
|
||||||
|
hooks:
|
||||||
|
- id: isort
|
||||||
|
|
||||||
|
- repo: https://github.com/PyCQA/flake8
|
||||||
|
rev: 6.0.0
|
||||||
|
hooks:
|
||||||
|
- id: flake8
|
||||||
|
|
||||||
|
ci:
|
||||||
|
autoupdate_schedule: weekly
|
||||||
|
skip: []
|
||||||
|
submodules: false
|
|
@ -12,3 +12,10 @@ pin: True
|
||||||
# requires private repo permissions, even on public repos
|
# requires private repo permissions, even on public repos
|
||||||
# default: empty
|
# default: empty
|
||||||
label_prs: update
|
label_prs: update
|
||||||
|
|
||||||
|
requirements:
|
||||||
|
- "requirements.txt"
|
||||||
|
- "docs/requirements.txt"
|
||||||
|
- "{{cookiecutter.project_slug}}/requirements/base.txt"
|
||||||
|
- "{{cookiecutter.project_slug}}/requirements/local.txt"
|
||||||
|
- "{{cookiecutter.project_slug}}/requirements/production.txt"
|
||||||
|
|
15
.readthedocs.yaml
Normal file
15
.readthedocs.yaml
Normal file
|
@ -0,0 +1,15 @@
|
||||||
|
# Read the Docs configuration file
|
||||||
|
# See https://docs.readthedocs.io/en/stable/config-file/v2.html for details
|
||||||
|
|
||||||
|
# Required
|
||||||
|
version: 2
|
||||||
|
|
||||||
|
# Build documentation in the docs/ directory with Sphinx
|
||||||
|
sphinx:
|
||||||
|
configuration: docs/conf.py
|
||||||
|
|
||||||
|
# Version of Python and requirements required to build the docs
|
||||||
|
python:
|
||||||
|
version: "3.8"
|
||||||
|
install:
|
||||||
|
- requirements: docs/requirements.txt
|
36
.travis.yml
36
.travis.yml
|
@ -1,36 +0,0 @@
|
||||||
services:
|
|
||||||
- docker
|
|
||||||
|
|
||||||
language: python
|
|
||||||
|
|
||||||
python: 3.8
|
|
||||||
|
|
||||||
before_install:
|
|
||||||
- docker-compose -v
|
|
||||||
- docker -v
|
|
||||||
|
|
||||||
matrix:
|
|
||||||
include:
|
|
||||||
- name: Test results
|
|
||||||
script: tox -e py38
|
|
||||||
- name: Black template
|
|
||||||
script: tox -e black-template
|
|
||||||
- name: Basic Docker
|
|
||||||
script: sh tests/test_docker.sh
|
|
||||||
- name: Extended Docker
|
|
||||||
script: sh tests/test_docker.sh use_celery=y use_drf=y
|
|
||||||
- name: Bare metal
|
|
||||||
script: sh tests/test_bare.sh use_celery=y use_compressor=y
|
|
||||||
services:
|
|
||||||
- postgresql
|
|
||||||
- redis-server
|
|
||||||
env:
|
|
||||||
- CELERY_BROKER_URL=redis://localhost:6379/0
|
|
||||||
|
|
||||||
install:
|
|
||||||
- pip install tox
|
|
||||||
|
|
||||||
notifications:
|
|
||||||
email:
|
|
||||||
on_success: change
|
|
||||||
on_failure: always
|
|
2176
CHANGELOG.md
2176
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
3
CODE_OF_CONDUCT.md
Normal file
3
CODE_OF_CONDUCT.md
Normal file
|
@ -0,0 +1,3 @@
|
||||||
|
## Code of Conduct
|
||||||
|
|
||||||
|
Everyone who interacts in the Cookiecutter project's codebase, issue trackers, chat rooms, and mailing lists is expected to follow the [PyPA Code of Conduct](https://www.pypa.io/en/latest/code-of-conduct/).
|
42
CONTRIBUTING.md
Normal file
42
CONTRIBUTING.md
Normal file
|
@ -0,0 +1,42 @@
|
||||||
|
# How to Contribute
|
||||||
|
|
||||||
|
Always happy to get issues identified and pull requests!
|
||||||
|
|
||||||
|
## Getting your pull request merged in
|
||||||
|
|
||||||
|
1. Keep it small. The smaller the pull request, the more likely we are to accept.
|
||||||
|
2. Pull requests that fix a current issue get priority for review.
|
||||||
|
|
||||||
|
## Testing
|
||||||
|
|
||||||
|
### Installation
|
||||||
|
|
||||||
|
Please install [tox](https://tox.readthedocs.io/en/latest/), which is a generic virtualenv management and test command line tool.
|
||||||
|
|
||||||
|
[tox](https://tox.readthedocs.io/en/latest/) is available for download from [PyPI](https://pypi.python.org/pypi) via [pip](https://pypi.python.org/pypi/pip/):
|
||||||
|
|
||||||
|
$ pip install tox
|
||||||
|
|
||||||
|
It will automatically create a fresh virtual environment and install our test dependencies,
|
||||||
|
such as [pytest-cookies](https://pypi.python.org/pypi/pytest-cookies/) and [flake8](https://pypi.python.org/pypi/flake8/).
|
||||||
|
|
||||||
|
### Run the Tests
|
||||||
|
|
||||||
|
Tox uses pytest under the hood, hence it supports the same syntax for selecting tests.
|
||||||
|
|
||||||
|
For further information please consult the [pytest usage docs](https://pytest.org/latest/usage.html#specifying-tests-selecting-tests).
|
||||||
|
|
||||||
|
To run all tests using various versions of python in virtualenvs defined in tox.ini, just run tox.:
|
||||||
|
|
||||||
|
$ tox
|
||||||
|
|
||||||
|
It is possible to test with a specific version of python. To do this, the command
|
||||||
|
is:
|
||||||
|
|
||||||
|
$ tox -e py310
|
||||||
|
|
||||||
|
This will run pytest with the python3.10 interpreter, for example.
|
||||||
|
|
||||||
|
To run a particular test with tox for against your current Python version:
|
||||||
|
|
||||||
|
$ tox -e py -- -k test_default_configuration
|
|
@ -1,55 +0,0 @@
|
||||||
How to Contribute
|
|
||||||
=================
|
|
||||||
|
|
||||||
Always happy to get issues identified and pull requests!
|
|
||||||
|
|
||||||
Getting your pull request merged in
|
|
||||||
------------------------------------
|
|
||||||
|
|
||||||
#. Keep it small. The smaller the pull request the more likely I'll pull it in.
|
|
||||||
#. Pull requests that fix a current issue get priority for review.
|
|
||||||
#. If you're not already in the `CONTRIBUTORS.rst` file, add yourself!
|
|
||||||
|
|
||||||
Testing
|
|
||||||
-------
|
|
||||||
|
|
||||||
Installation
|
|
||||||
~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Please install `tox`_, which is a generic virtualenv management and test command line tool.
|
|
||||||
|
|
||||||
`tox`_ is available for download from `PyPI`_ via `pip`_::
|
|
||||||
|
|
||||||
$ pip install tox
|
|
||||||
|
|
||||||
It will automatically create a fresh virtual environment and install our test dependencies,
|
|
||||||
such as `pytest-cookies`_ and `flake8`_.
|
|
||||||
|
|
||||||
Run the Tests
|
|
||||||
~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
Tox uses py.test under the hood, hence it supports the same syntax for selecting tests.
|
|
||||||
|
|
||||||
For further information please consult the `pytest usage docs`_.
|
|
||||||
|
|
||||||
To run all tests using various versions of python in virtualenvs defined in tox.ini, just run tox.::
|
|
||||||
|
|
||||||
$ tox
|
|
||||||
|
|
||||||
It is possible to test with a specific version of python. To do this, the command
|
|
||||||
is::
|
|
||||||
|
|
||||||
$ tox -e py38
|
|
||||||
|
|
||||||
This will run py.test with the python3.8 interpreter, for example.
|
|
||||||
|
|
||||||
To run a particular test with tox for against your current Python version::
|
|
||||||
|
|
||||||
$ tox -e py -- -k test_default_configuration
|
|
||||||
|
|
||||||
.. _`pytest usage docs`: https://pytest.org/latest/usage.html#specifying-tests-selecting-tests
|
|
||||||
.. _`tox`: https://tox.readthedocs.io/en/latest/
|
|
||||||
.. _`pip`: https://pypi.python.org/pypi/pip/
|
|
||||||
.. _`pytest-cookies`: https://pypi.python.org/pypi/pytest-cookies/
|
|
||||||
.. _`flake8`: https://pypi.python.org/pypi/flake8/
|
|
||||||
.. _`PyPI`: https://pypi.python.org/pypi
|
|
1935
CONTRIBUTORS.md
Normal file
1935
CONTRIBUTORS.md
Normal file
File diff suppressed because it is too large
Load Diff
416
CONTRIBUTORS.rst
416
CONTRIBUTORS.rst
|
@ -1,416 +0,0 @@
|
||||||
Contributors
|
|
||||||
============
|
|
||||||
|
|
||||||
Core Developers
|
|
||||||
---------------
|
|
||||||
|
|
||||||
These contributors have commit flags for the repository,
|
|
||||||
and are able to accept and merge pull requests.
|
|
||||||
|
|
||||||
=========================== ================= ===========
|
|
||||||
Name Github Twitter
|
|
||||||
=========================== ================= ===========
|
|
||||||
Daniel Roy Greenfeld `@pydanny`_ @pydanny
|
|
||||||
Audrey Roy Greenfeld* `@audreyr`_ @audreyr
|
|
||||||
Fábio C. Barrionuevo da Luz `@luzfcb`_ @luzfcb
|
|
||||||
Saurabh Kumar `@theskumar`_ @_theskumar
|
|
||||||
Jannis Gebauer `@jayfk`_
|
|
||||||
Burhan Khalid `@burhan`_ @burhan
|
|
||||||
Nikita Shupeyko `@webyneter`_ @webyneter
|
|
||||||
Bruno Alla `@browniebroke`_ @_BrunoAlla
|
|
||||||
Wan Liuyang `@sfdye`_ @sfdye
|
|
||||||
=========================== ================= ===========
|
|
||||||
|
|
||||||
*Audrey is also the creator of Cookiecutter. Audrey and
|
|
||||||
Daniel are on the Cookiecutter core team.*
|
|
||||||
|
|
||||||
.. _@pydanny: https://github.com/pydanny
|
|
||||||
.. _@luzfcb: https://github.com/luzfcb
|
|
||||||
.. _@theskumar: https://github.com/theskumar
|
|
||||||
.. _@audreyr: https://github.com/audreyr
|
|
||||||
.. _@jayfk: https://github.com/jayfk
|
|
||||||
.. _@webyneter: https://github.com/webyneter
|
|
||||||
.. _@browniebroke: https://github.com/browniebroke
|
|
||||||
.. _@sfdye: https://github.com/sfdye
|
|
||||||
|
|
||||||
Other Contributors
|
|
||||||
------------------
|
|
||||||
|
|
||||||
Listed in alphabetical order.
|
|
||||||
|
|
||||||
========================== ============================ ==============
|
|
||||||
Name Github Twitter
|
|
||||||
========================== ============================ ==============
|
|
||||||
18 `@dezoito`_
|
|
||||||
2O4 `@2O4`_
|
|
||||||
a7p `@a7p`_
|
|
||||||
Aaron Eikenberry `@aeikenberry`_
|
|
||||||
Adam Bogdał `@bogdal`_
|
|
||||||
Adam Dobrawy `@ad-m`_
|
|
||||||
Adam Steele `@adammsteele`_
|
|
||||||
Agam Dua
|
|
||||||
Agustín Scaramuzza `@scaramagus`_ @scaramagus
|
|
||||||
Alberto Sanchez `@alb3rto`_
|
|
||||||
Alex Tsai `@caffodian`_
|
|
||||||
Alvaro [Andor] `@andor-pierdelacabeza`_
|
|
||||||
Amjith Ramanujam `@amjith`_
|
|
||||||
Andreas Meistad `@ameistad`_
|
|
||||||
Andres Gonzalez `@andresgz`_
|
|
||||||
Andrew Mikhnevich `@zcho`_
|
|
||||||
Andrew Chen Wang `@Andrew-Chen-Wang`_
|
|
||||||
Andy Rose
|
|
||||||
Anna Callahan `@jazztpt`_
|
|
||||||
Anna Sidwell `@takkaria`_
|
|
||||||
Antonia Blair `@antoniablair`_ @antoniablairart
|
|
||||||
Anuj Bansal `@ahhda`_
|
|
||||||
Arcuri Davide `@dadokkio`_
|
|
||||||
Areski Belaid `@areski`_
|
|
||||||
Ashley Camba
|
|
||||||
Barclay Gauld `@yunti`_
|
|
||||||
Bartek `@btknu`_
|
|
||||||
Ben Lopatin
|
|
||||||
Ben Warren `@bwarren2`_
|
|
||||||
Benjamin Abel
|
|
||||||
Bert de Miranda `@bertdemiranda`_
|
|
||||||
Bo Lopker `@blopker`_
|
|
||||||
Bo Peng `@BoPeng`_
|
|
||||||
Bouke Haarsma
|
|
||||||
Brent Payne `@brentpayne`_ @brentpayne
|
|
||||||
Bruce Olivier `@bolivierjr`_
|
|
||||||
Burhan Khalid `@burhan`_ @burhan
|
|
||||||
Caio Ariede `@caioariede`_ @caioariede
|
|
||||||
Carl Johnson `@carlmjohnson`_ @carlmjohnson
|
|
||||||
Catherine Devlin `@catherinedevlin`_
|
|
||||||
Cédric Gaspoz `@cgaspoz`_
|
|
||||||
Charlie Smith `@chuckus`_
|
|
||||||
Chris Curvey `@ccurvey`_
|
|
||||||
Chris Franklin
|
|
||||||
Chris Franklin `@hairychris`_
|
|
||||||
Chris Pappalardo `@ChrisPappalardo`_
|
|
||||||
Christopher Clarke `@chrisdev`_
|
|
||||||
Cole Mackenzie `@cmackenzie1`_
|
|
||||||
Cole Maclean `@cole`_ @cole
|
|
||||||
Collederas `@Collederas`_
|
|
||||||
Craig Margieson `@cmargieson`_
|
|
||||||
Cristian Vargas `@cdvv7788`_
|
|
||||||
Cullen Rhodes `@c-rhodes`_
|
|
||||||
Curtis St Pierre `@curtisstpierre`_ @cstpierre1388
|
|
||||||
Dan Shultz `@shultz`_
|
|
||||||
Dani Hodovic `@danihodovic`_
|
|
||||||
Daniel Hepper `@dhepper`_ @danielhepper
|
|
||||||
Daniel Hillier `@danifus`_
|
|
||||||
Daniel Sears `@highpost`_ @highpost
|
|
||||||
Daniele Tricoli `@eriol`_
|
|
||||||
David Díaz `@ddiazpinto`_ @DavidDiazPinto
|
|
||||||
Davit Tovmasyan `@davitovmasyan`_
|
|
||||||
Davur Clementsen `@dsclementsen`_ @davur
|
|
||||||
Delio Castillo `@jangeador`_ @jangeador
|
|
||||||
Demetris Stavrou `@demestav`_
|
|
||||||
Denis Bobrov `@delneg`_
|
|
||||||
Denis Orehovsky `@apirobot`_
|
|
||||||
Denis Savran `@blaxpy`_
|
|
||||||
Diane Chen `@purplediane`_ @purplediane88
|
|
||||||
Dónal Adams `@epileptic-fish`_
|
|
||||||
Dong Huynh `@trungdong`_
|
|
||||||
Duda Nogueira `@dudanogueira`_ @dudanogueira
|
|
||||||
Emanuel Calso `@bloodpet`_ @bloodpet
|
|
||||||
Eraldo Energy `@eraldo`_
|
|
||||||
Eric Groom `@ericgroom`_
|
|
||||||
Ernesto Cedeno `@codnee`_
|
|
||||||
Eyad Al Sibai `@eyadsibai`_
|
|
||||||
Felipe Arruda `@arruda`_
|
|
||||||
Florian Idelberger `@step21`_ @windrush
|
|
||||||
Gabriel Mejia `@elgartoinf`_ @elgartoinf
|
|
||||||
Garry Cairns `@garry-cairns`_
|
|
||||||
Garry Polley `@garrypolley`_
|
|
||||||
Gilbishkosma `@Gilbishkosma`_
|
|
||||||
Glenn Wiskur `@gwiskur`_
|
|
||||||
Guilherme Guy `@guilherme1guy`_
|
|
||||||
Hamish Durkin `@durkode`_
|
|
||||||
Hana Quadara `@hanaquadara`_
|
|
||||||
Hannah Lazarus `@hanhanhan`_
|
|
||||||
Harry Moreno `@morenoh149`_ @morenoh149
|
|
||||||
Harry Percival `@hjwp`_
|
|
||||||
Hendrik Schneider `@hendrikschneider`_
|
|
||||||
Henrique G. G. Pereira `@ikkebr`_
|
|
||||||
Howie Zhao `@howiezhao`_
|
|
||||||
Ian Lee `@IanLee1521`_
|
|
||||||
Irfan Ahmad `@erfaan`_ @erfaan
|
|
||||||
Isaac12x `@Isaac12x`_
|
|
||||||
Ivan Khomutov `@ikhomutov`_
|
|
||||||
James Williams `@jameswilliams1`_
|
|
||||||
Jan Van Bruggen `@jvanbrug`_
|
|
||||||
Jelmer Draaijer `@foarsitter`_
|
|
||||||
Jerome Caisip `@jeromecaisip`_
|
|
||||||
Jens Nilsson `@phiberjenz`_
|
|
||||||
Jerome Leclanche `@jleclanche`_ @Adys
|
|
||||||
Jimmy Gitonga `@afrowave`_ @afrowave
|
|
||||||
John Cass `@jcass77`_ @cass_john
|
|
||||||
Jonathan Thompson `@nojanath`_
|
|
||||||
Jules Cheron `@jules-ch`_
|
|
||||||
Julien Almarcha `@sladinji`_
|
|
||||||
Julio Castillo `@juliocc`_
|
|
||||||
Kaido Kert `@kaidokert`_
|
|
||||||
kappataumu `@kappataumu`_ @kappataumu
|
|
||||||
Kaveh `@ka7eh`_
|
|
||||||
Keith Bailey `@keithjeb`_
|
|
||||||
Keith Webber `@townie`_
|
|
||||||
Kevin A. Stone
|
|
||||||
Kevin Ndung'u `@kevgathuku`_
|
|
||||||
Keyvan Mosharraf `@keyvanm`_
|
|
||||||
Krzysztof Szumny `@noisy`_
|
|
||||||
Krzysztof Żuraw `@krzysztofzuraw`_
|
|
||||||
Leo won `@leollon`_
|
|
||||||
Leo Zhou `@glasslion`_
|
|
||||||
Leon Kim `@PilhwanKim`_
|
|
||||||
Leonardo Jimenez `@xpostudio4`_
|
|
||||||
Lin Xianyi `@iynaix`_
|
|
||||||
Luis Nell `@originell`_
|
|
||||||
Lukas Klein
|
|
||||||
Lyla Fischer
|
|
||||||
Malik Sulaimanov `@flyudvik`_ @flyudvik
|
|
||||||
Martin Blech
|
|
||||||
Martin Saizar `@msaizar`_
|
|
||||||
Mateusz Ostaszewski `@mostaszewski`_
|
|
||||||
Mathijs Hoogland `@MathijsHoogland`_
|
|
||||||
Matt Braymer-Hayes `@mattayes`_ @mattayes
|
|
||||||
Matt Knapper `@mknapper1`_
|
|
||||||
Matt Linares
|
|
||||||
Matt Menzenski `@menzenski`_
|
|
||||||
Matt Warren `@mfwarren`_
|
|
||||||
Matthew Sisley `@mjsisley`_
|
|
||||||
Meghan Heintz `@dot2dotseurat`_
|
|
||||||
Mesut Yılmaz `@myilmaz`_
|
|
||||||
Michael Gecht `@mimischi`_ @_mischi
|
|
||||||
Michael Samoylov `@msamoylov`_
|
|
||||||
Min ho Kim `@minho42`_
|
|
||||||
mozillazg `@mozillazg`_
|
|
||||||
Nico Stefani `@nicolas471`_ @moby_dick91
|
|
||||||
Oleg Russkin `@rolep`_
|
|
||||||
Pablo `@oubiga`_
|
|
||||||
Parbhat Puri `@parbhat`_
|
|
||||||
Pawan Chaurasia `@rjsnh1522`_
|
|
||||||
Peter Bittner `@bittner`_
|
|
||||||
Peter Coles `@mrcoles`_
|
|
||||||
Philipp Matthies `@canonnervio`_
|
|
||||||
Pierre Chiquet `@pchiquet`_
|
|
||||||
Raony Guimarães Corrêa `@raonyguimaraes`_
|
|
||||||
Raphael Pierzina `@hackebrot`_
|
|
||||||
Reggie Riser `@reggieriser`_
|
|
||||||
René Muhl `@rm--`_
|
|
||||||
Roman Afanaskin `@siauPatrick`_
|
|
||||||
Roman Osipenko `@romanosipenko`_
|
|
||||||
Russell Davies
|
|
||||||
Sam Collins `@MightySCollins`_
|
|
||||||
Sascha `@saschalalala`_ @saschalalala
|
|
||||||
Shupeyko Nikita `@webyneter`_
|
|
||||||
Sławek Ehlert `@slafs`_
|
|
||||||
Sorasful `@sorasful`_
|
|
||||||
Srinivas Nyayapati `@shireenrao`_
|
|
||||||
stepmr `@stepmr`_
|
|
||||||
Steve Steiner `@ssteinerX`_
|
|
||||||
Sudarshan Wadkar `@wadkar`_
|
|
||||||
Sule Marshall `@suledev`_
|
|
||||||
Tano Abeleyra `@tanoabeleyra`_
|
|
||||||
Taylor Baldwin
|
|
||||||
Théo Segonds `@show0k`_
|
|
||||||
Tim Claessens `@timclaessens`_
|
|
||||||
Tim Freund `@timfreund`_
|
|
||||||
Tom Atkins `@knitatoms`_
|
|
||||||
Tom Offermann
|
|
||||||
Travis McNeill `@Travistock`_ @tavistock_esq
|
|
||||||
Tubo Shi `@Tubo`_
|
|
||||||
Umair Ashraf `@umrashrf`_ @fabumair
|
|
||||||
Vadim Iskuchekov `@Egregors`_ @egregors
|
|
||||||
Vicente G. Reyes `@reyesvicente`_ @highcenburg
|
|
||||||
Vitaly Babiy
|
|
||||||
Vivian Guillen `@viviangb`_
|
|
||||||
Vlad Doster `@vladdoster`_
|
|
||||||
Will Farley `@goldhand`_ @g01dhand
|
|
||||||
William Archinal `@archinal`_
|
|
||||||
Xaver Y.R. Chen `@yrchen`_ @yrchen
|
|
||||||
Yaroslav Halchenko
|
|
||||||
Yuchen Xie `@mapx`_
|
|
||||||
========================== ============================ ==============
|
|
||||||
|
|
||||||
.. _@a7p: https://github.com/a7p
|
|
||||||
.. _@2O4: https://github.com/2O4
|
|
||||||
.. _@ad-m: https://github.com/ad-m
|
|
||||||
.. _@adammsteele: https://github.com/adammsteele
|
|
||||||
.. _@aeikenberry: https://github.com/aeikenberry
|
|
||||||
.. _@afrowave: https://github.com/afrowave
|
|
||||||
.. _@ahhda: https://github.com/ahhda
|
|
||||||
.. _@alb3rto: https://github.com/alb3rto
|
|
||||||
.. _@ameistad: https://github.com/ameistad
|
|
||||||
.. _@amjith: https://github.com/amjith
|
|
||||||
.. _@andor-pierdelacabeza: https://github.com/andor-pierdelacabeza
|
|
||||||
.. _@andresgz: https://github.com/andresgz
|
|
||||||
.. _@antoniablair: https://github.com/antoniablair
|
|
||||||
.. _@Andrew-Chen-Wang: https://github.com/Andrew-Chen-Wang
|
|
||||||
.. _@apirobot: https://github.com/apirobot
|
|
||||||
.. _@archinal: https://github.com/archinal
|
|
||||||
.. _@areski: https://github.com/areski
|
|
||||||
.. _@arruda: https://github.com/arruda
|
|
||||||
.. _@bertdemiranda: https://github.com/bertdemiranda
|
|
||||||
.. _@bittner: https://github.com/bittner
|
|
||||||
.. _@blaxpy: https://github.com/blaxpy
|
|
||||||
.. _@bloodpet: https://github.com/bloodpet
|
|
||||||
.. _@blopker: https://github.com/blopker
|
|
||||||
.. _@bogdal: https://github.com/bogdal
|
|
||||||
.. _@bolivierjr: https://github.com/bolivierjr
|
|
||||||
.. _@BoPeng: https://github.com/BoPeng
|
|
||||||
.. _@brentpayne: https://github.com/brentpayne
|
|
||||||
.. _@btknu: https://github.com/btknu
|
|
||||||
.. _@burhan: https://github.com/burhan
|
|
||||||
.. _@bwarren2: https://github.com/bwarren2
|
|
||||||
.. _@c-rhodes: https://github.com/c-rhodes
|
|
||||||
.. _@caffodian: https://github.com/caffodian
|
|
||||||
.. _@canonnervio: https://github.com/canonnervio
|
|
||||||
.. _@caioariede: https://github.com/caioariede
|
|
||||||
.. _@carlmjohnson: https://github.com/carlmjohnson
|
|
||||||
.. _@catherinedevlin: https://github.com/catherinedevlin
|
|
||||||
.. _@ccurvey: https://github.com/ccurvey
|
|
||||||
.. _@cdvv7788: https://github.com/cdvv7788
|
|
||||||
.. _@cgaspoz: https://github.com/cgaspoz
|
|
||||||
.. _@chrisdev: https://github.com/chrisdev
|
|
||||||
.. _@ChrisPappalardo: https://github.com/ChrisPappalardo
|
|
||||||
.. _@chuckus: https://github.com/chuckus
|
|
||||||
.. _@cmackenzie1: https://github.com/cmackenzie1
|
|
||||||
.. _@cmargieson: https://github.com/cmargieson
|
|
||||||
.. _@codnee: https://github.com/codnee
|
|
||||||
.. _@cole: https://github.com/cole
|
|
||||||
.. _@Collederas: https://github.com/Collederas
|
|
||||||
.. _@curtisstpierre: https://github.com/curtisstpierre
|
|
||||||
.. _@dadokkio: https://github.com/dadokkio
|
|
||||||
.. _@danihodovic: https://github.com/danihodovic
|
|
||||||
.. _@danifus: https://github.com/danifus
|
|
||||||
.. _@davitovmasyan: https://github.com/davitovmasyan
|
|
||||||
.. _@ddiazpinto: https://github.com/ddiazpinto
|
|
||||||
.. _@delneg: https://github.com/delneg
|
|
||||||
.. _@demestav: https://github.com/demestav
|
|
||||||
.. _@dezoito: https://github.com/dezoito
|
|
||||||
.. _@dhepper: https://github.com/dhepper
|
|
||||||
.. _@dot2dotseurat: https://github.com/dot2dotseurat
|
|
||||||
.. _@dudanogueira: https://github.com/dudanogueira
|
|
||||||
.. _@dsclementsen: https://github.com/dsclementsen
|
|
||||||
.. _@guilherme1guy: https://github.com/guilherme1guy
|
|
||||||
.. _@durkode: https://github.com/durkode
|
|
||||||
.. _@Egregors: https://github.com/Egregors
|
|
||||||
.. _@elgartoinf: https://github.com/elgartoinf
|
|
||||||
.. _@epileptic-fish: https://github.com/epileptic-fish
|
|
||||||
.. _@eraldo: https://github.com/eraldo
|
|
||||||
.. _@erfaan: https://github.com/erfaan
|
|
||||||
.. _@ericgroom: https://github.com/ericgroom
|
|
||||||
.. _@eriol: https://github.com/eriol
|
|
||||||
.. _@eyadsibai: https://github.com/eyadsibai
|
|
||||||
.. _@flyudvik: https://github.com/flyudvik
|
|
||||||
.. _@foarsitter: https://github.com/foarsitter
|
|
||||||
.. _@garry-cairns: https://github.com/garry-cairns
|
|
||||||
.. _@garrypolley: https://github.com/garrypolley
|
|
||||||
.. _@Gilbishkosma: https://github.com/Gilbishkosma
|
|
||||||
.. _@gwiskur: https://github.com/gwiskur
|
|
||||||
.. _@glasslion: https://github.com/glasslion
|
|
||||||
.. _@goldhand: https://github.com/goldhand
|
|
||||||
.. _@hackebrot: https://github.com/hackebrot
|
|
||||||
.. _@hairychris: https://github.com/hairychris
|
|
||||||
.. _@hanaquadara: https://github.com/hanaquadara
|
|
||||||
.. _@hanhanhan: https://github.com/hanhanhan
|
|
||||||
.. _@hendrikschneider: https://github.com/hendrikschneider
|
|
||||||
.. _@highpost: https://github.com/highpost
|
|
||||||
.. _@hjwp: https://github.com/hjwp
|
|
||||||
.. _@howiezhao: https://github.com/howiezhao
|
|
||||||
.. _@IanLee1521: https://github.com/IanLee1521
|
|
||||||
.. _@ikhomutov: https://github.com/ikhomutov
|
|
||||||
.. _@jameswilliams1: https://github.com/jameswilliams1
|
|
||||||
.. _@ikkebr: https://github.com/ikkebr
|
|
||||||
.. _@Isaac12x: https://github.com/Isaac12x
|
|
||||||
.. _@iynaix: https://github.com/iynaix
|
|
||||||
.. _@jangeador: https://github.com/jangeador
|
|
||||||
.. _@jazztpt: https://github.com/jazztpt
|
|
||||||
.. _@jcass77: https://github.com/jcass77
|
|
||||||
.. _@jeromecaisip: https://github.com/jeromecaisip
|
|
||||||
.. _@jleclanche: https://github.com/jleclanche
|
|
||||||
.. _@jules-ch: https://github.com/jules-ch
|
|
||||||
.. _@juliocc: https://github.com/juliocc
|
|
||||||
.. _@jvanbrug: https://github.com/jvanbrug
|
|
||||||
.. _@ka7eh: https://github.com/ka7eh
|
|
||||||
.. _@kaidokert: https://github.com/kaidokert
|
|
||||||
.. _@kappataumu: https://github.com/kappataumu
|
|
||||||
.. _@keithjeb: https://github.com/keithjeb
|
|
||||||
.. _@kevgathuku: https://github.com/kevgathuku
|
|
||||||
.. _@keyvanm: https://github.com/keyvanm
|
|
||||||
.. _@knitatoms: https://github.com/knitatoms
|
|
||||||
.. _@krzysztofzuraw: https://github.com/krzysztofzuraw
|
|
||||||
.. _@leollon: https://github.com/leollon
|
|
||||||
.. _@MathijsHoogland: https://github.com/MathijsHoogland
|
|
||||||
.. _@mapx: https://github.com/mapx
|
|
||||||
.. _@mattayes: https://github.com/mattayes
|
|
||||||
.. _@menzenski: https://github.com/menzenski
|
|
||||||
.. _@mfwarren: https://github.com/mfwarren
|
|
||||||
.. _@MightySCollins: https://github.com/MightySCollins
|
|
||||||
.. _@mimischi: https://github.com/mimischi
|
|
||||||
.. _@minho42: https://github.com/minho42
|
|
||||||
.. _@mjsisley: https://github.com/mjsisley
|
|
||||||
.. _@mknapper1: https://github.com/mknapper1
|
|
||||||
.. _@morenoh149: https://github.com/morenoh149
|
|
||||||
.. _@mostaszewski: https://github.com/mostaszewski
|
|
||||||
.. _@mozillazg: https://github.com/mozillazg
|
|
||||||
.. _@mrcoles: https://github.com/mrcoles
|
|
||||||
.. _@msaizar: https://github.com/msaizar
|
|
||||||
.. _@msamoylov: https://github.com/msamoylov
|
|
||||||
.. _@myilmaz: https://github.com/myilmaz
|
|
||||||
.. _@nicolas471: https://github.com/nicolas471
|
|
||||||
.. _@noisy: https://github.com/noisy
|
|
||||||
.. _@nojanath: https://github.com/nojanath
|
|
||||||
.. _@originell: https://github.com/originell
|
|
||||||
.. _@oubiga: https://github.com/oubiga
|
|
||||||
.. _@parbhat: https://github.com/parbhat
|
|
||||||
.. _@rjsnh1522: https://github.com/rjsnh1522
|
|
||||||
.. _@pchiquet: https://github.com/pchiquet
|
|
||||||
.. _@phiberjenz: https://github.com/phiberjenz
|
|
||||||
.. _@PilhwanKim: https://github.com/PilhwanKim
|
|
||||||
.. _@purplediane: https://github.com/purplediane
|
|
||||||
.. _@raonyguimaraes: https://github.com/raonyguimaraes
|
|
||||||
.. _@reggieriser: https://github.com/reggieriser
|
|
||||||
.. _@reyesvicente: https://github.com/reyesvicente
|
|
||||||
.. _@rm--: https://github.com/rm--
|
|
||||||
.. _@rolep: https://github.com/rolep
|
|
||||||
.. _@romanosipenko: https://github.com/romanosipenko
|
|
||||||
.. _@saschalalala: https://github.com/saschalalala
|
|
||||||
.. _@scaramagus: https://github.com/scaramagus
|
|
||||||
.. _@shireenrao: https://github.com/shireenrao
|
|
||||||
.. _@show0k: https://github.com/show0k
|
|
||||||
.. _@shultz: https://github.com/shultz
|
|
||||||
.. _@siauPatrick: https://github.com/siauPatrick
|
|
||||||
.. _@sladinji: https://github.com/sladinji
|
|
||||||
.. _@slafs: https://github.com/slafs
|
|
||||||
.. _@sorasful: https://github.com/sorasful
|
|
||||||
.. _@ssteinerX: https://github.com/ssteinerx
|
|
||||||
.. _@step21: https://github.com/step21
|
|
||||||
.. _@stepmr: https://github.com/stepmr
|
|
||||||
.. _@suledev: https://github.com/suledev
|
|
||||||
.. _@takkaria: https://github.com/takkaria
|
|
||||||
.. _@tanoabeleyra: https://github.com/tanoabeleyra
|
|
||||||
.. _@timclaessens: https://github.com/timclaessens
|
|
||||||
.. _@timfreund: https://github.com/timfreund
|
|
||||||
.. _@townie: https://github.com/townie
|
|
||||||
.. _@Travistock: https://github.com/Tavistock
|
|
||||||
.. _@trungdong: https://github.com/trungdong
|
|
||||||
.. _@Tubo: https://github.com/tubo
|
|
||||||
.. _@umrashrf: https://github.com/umrashrf
|
|
||||||
.. _@viviangb: https://github.com/viviangb
|
|
||||||
.. _@vladdoster: https://github.com/vladdoster
|
|
||||||
.. _@wadkar: https://github.com/wadkar
|
|
||||||
.. _@xpostudio4: https://github.com/xpostudio4
|
|
||||||
.. _@yrchen: https://github.com/yrchen
|
|
||||||
.. _@yunti: https://github.com/yunti
|
|
||||||
.. _@zcho: https://github.com/zcho
|
|
||||||
|
|
||||||
Special Thanks
|
|
||||||
~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
The following haven't provided code directly, but have provided guidance and advice.
|
|
||||||
|
|
||||||
* Jannis Leidel
|
|
||||||
* Nate Aune
|
|
||||||
* Barry Morrison
|
|
248
README.md
Normal file
248
README.md
Normal file
|
@ -0,0 +1,248 @@
|
||||||
|
# Cookiecutter Django
|
||||||
|
|
||||||
|
[](https://github.com/cookiecutter/cookiecutter-django/actions/workflows/ci.yml?query=branch%3Amaster)
|
||||||
|
[](https://cookiecutter-django.readthedocs.io/en/latest/?badge=latest)
|
||||||
|
[](https://pyup.io/repos/github/cookiecutter/cookiecutter-django/)
|
||||||
|
[](https://discord.gg/uFXweDQc5a)
|
||||||
|
[](https://www.codetriage.com/cookiecutter/cookiecutter-django)
|
||||||
|
[](https://github.com/ambv/black)
|
||||||
|
|
||||||
|
Powered by [Cookiecutter](https://github.com/cookiecutter/cookiecutter), Cookiecutter Django is a framework for jumpstarting
|
||||||
|
production-ready Django projects quickly.
|
||||||
|
|
||||||
|
- Documentation: <https://cookiecutter-django.readthedocs.io/en/latest/>
|
||||||
|
- See [Troubleshooting](https://cookiecutter-django.readthedocs.io/en/latest/troubleshooting.html) for common errors and obstacles
|
||||||
|
- If you have problems with Cookiecutter Django, please open [issues](https://github.com/cookiecutter/cookiecutter-django/issues/new) don't send
|
||||||
|
emails to the maintainers.
|
||||||
|
|
||||||
|
## Features
|
||||||
|
|
||||||
|
- For Django 4.0
|
||||||
|
- Works with Python 3.10
|
||||||
|
- Renders Django projects with 100% starting test coverage
|
||||||
|
- Twitter [Bootstrap](https://github.com/twbs/bootstrap) v5
|
||||||
|
- [12-Factor](http://12factor.net/) based settings via [django-environ](https://github.com/joke2k/django-environ)
|
||||||
|
- Secure by default. We believe in SSL.
|
||||||
|
- Optimized development and production settings
|
||||||
|
- Registration via [django-allauth](https://github.com/pennersr/django-allauth)
|
||||||
|
- Comes with custom user model ready to go
|
||||||
|
- Optional basic ASGI setup for Websockets
|
||||||
|
- Optional custom static build using Gulp or Webpack
|
||||||
|
- Send emails via [Anymail](https://github.com/anymail/django-anymail) (using [Mailgun](http://www.mailgun.com/) by default or Amazon SES if AWS is selected cloud provider, but switchable)
|
||||||
|
- Media storage using Amazon S3, Google Cloud Storage or Azure Storage
|
||||||
|
- Docker support using [docker-compose](https://github.com/docker/compose) for development and production (using [Traefik](https://traefik.io/) with [LetsEncrypt](https://letsencrypt.org/) support)
|
||||||
|
- [Procfile](https://devcenter.heroku.com/articles/procfile) for deploying to Heroku
|
||||||
|
- Instructions for deploying to [PythonAnywhere](https://www.pythonanywhere.com/)
|
||||||
|
- Run tests with unittest or pytest
|
||||||
|
- Customizable PostgreSQL version
|
||||||
|
- Default integration with [pre-commit](https://github.com/pre-commit/pre-commit) for identifying simple issues before submission to code review
|
||||||
|
|
||||||
|
## Optional Integrations
|
||||||
|
|
||||||
|
*These features can be enabled during initial project setup.*
|
||||||
|
|
||||||
|
- Serve static files from Amazon S3, Google Cloud Storage, Azure Storage or [Whitenoise](https://whitenoise.readthedocs.io/)
|
||||||
|
- Configuration for [Celery](https://docs.celeryq.dev) and [Flower](https://github.com/mher/flower) (the latter in Docker setup only)
|
||||||
|
- Integration with [MailHog](https://github.com/mailhog/MailHog) for local email testing
|
||||||
|
- Integration with [Sentry](https://sentry.io/welcome/) for error logging
|
||||||
|
|
||||||
|
## Constraints
|
||||||
|
|
||||||
|
- Only maintained 3rd party libraries are used.
|
||||||
|
- Uses PostgreSQL everywhere: 10.19 - 14.1 ([MySQL fork](https://github.com/mabdullahadeel/cookiecutter-django-mysql) also available).
|
||||||
|
- Environment variables for configuration (This won't work with Apache/mod_wsgi).
|
||||||
|
|
||||||
|
## Support this Project!
|
||||||
|
|
||||||
|
This project is run by volunteers. Please support them in their efforts to maintain and improve Cookiecutter Django:
|
||||||
|
|
||||||
|
- Daniel Roy Greenfeld, Project Lead ([GitHub](https://github.com/pydanny), [Patreon](https://www.patreon.com/danielroygreenfeld)): expertise in Django and AWS ELB.
|
||||||
|
- Nikita Shupeyko, Core Developer ([GitHub](https://github.com/webyneter)): expertise in Python/Django, hands-on DevOps and frontend experience.
|
||||||
|
|
||||||
|
Projects that provide financial support to the maintainers:
|
||||||
|
|
||||||
|
------------------------------------------------------------------------
|
||||||
|
|
||||||
|
<p align="center">
|
||||||
|
<a href="https://www.feldroy.com/products/two-scoops-of-django-3-x"><img src="https://cdn.shopify.com/s/files/1/0304/6901/products/Two-Scoops-of-Django-3-Alpha-Cover_540x_26507b15-e489-470b-8a97-02773dd498d1_1080x.jpg"></a>
|
||||||
|
</p>
|
||||||
|
|
||||||
|
Two Scoops of Django 3.x is the best ice cream-themed Django reference in the universe!
|
||||||
|
|
||||||
|
### PyUp
|
||||||
|
|
||||||
|
<p align="center">
|
||||||
|
<a href="https://pyup.io/"><img src="https://pyup.io/static/images/logo.png"></a>
|
||||||
|
</p>
|
||||||
|
|
||||||
|
PyUp brings you automated security and dependency updates used by Google and other organizations. Free for open source projects!
|
||||||
|
|
||||||
|
## Usage
|
||||||
|
|
||||||
|
Let's pretend you want to create a Django project called "redditclone". Rather than using `startproject`
|
||||||
|
and then editing the results to include your name, email, and various configuration issues that always get forgotten until the worst possible moment, get [cookiecutter](https://github.com/cookiecutter/cookiecutter) to do all the work.
|
||||||
|
|
||||||
|
First, get Cookiecutter. Trust me, it's awesome:
|
||||||
|
|
||||||
|
$ pip install "cookiecutter>=1.7.0"
|
||||||
|
|
||||||
|
Now run it against this repo:
|
||||||
|
|
||||||
|
$ cookiecutter https://github.com/cookiecutter/cookiecutter-django
|
||||||
|
|
||||||
|
You'll be prompted for some values. Provide them, then a Django project will be created for you.
|
||||||
|
|
||||||
|
**Warning**: After this point, change 'Daniel Greenfeld', 'pydanny', etc to your own information.
|
||||||
|
|
||||||
|
Answer the prompts with your own desired [options](http://cookiecutter-django.readthedocs.io/en/latest/project-generation-options.html). For example:
|
||||||
|
|
||||||
|
Cloning into 'cookiecutter-django'...
|
||||||
|
remote: Counting objects: 550, done.
|
||||||
|
remote: Compressing objects: 100% (310/310), done.
|
||||||
|
remote: Total 550 (delta 283), reused 479 (delta 222)
|
||||||
|
Receiving objects: 100% (550/550), 127.66 KiB | 58 KiB/s, done.
|
||||||
|
Resolving deltas: 100% (283/283), done.
|
||||||
|
project_name [My Awesome Project]: Reddit Clone
|
||||||
|
project_slug [reddit_clone]: reddit
|
||||||
|
description [Behold My Awesome Project!]: A reddit clone.
|
||||||
|
author_name [Daniel Roy Greenfeld]: Daniel Greenfeld
|
||||||
|
domain_name [example.com]: myreddit.com
|
||||||
|
email [daniel-greenfeld@example.com]: pydanny@gmail.com
|
||||||
|
version [0.1.0]: 0.0.1
|
||||||
|
Select open_source_license:
|
||||||
|
1 - MIT
|
||||||
|
2 - BSD
|
||||||
|
3 - GPLv3
|
||||||
|
4 - Apache Software License 2.0
|
||||||
|
5 - Not open source
|
||||||
|
Choose from 1, 2, 3, 4, 5 [1]: 1
|
||||||
|
timezone [UTC]: America/Los_Angeles
|
||||||
|
windows [n]: n
|
||||||
|
use_pycharm [n]: y
|
||||||
|
use_docker [n]: n
|
||||||
|
Select postgresql_version:
|
||||||
|
1 - 14
|
||||||
|
2 - 13
|
||||||
|
3 - 12
|
||||||
|
4 - 11
|
||||||
|
5 - 10
|
||||||
|
Choose from 1, 2, 3, 4, 5 [1]: 1
|
||||||
|
Select cloud_provider:
|
||||||
|
1 - AWS
|
||||||
|
2 - GCP
|
||||||
|
3 - None
|
||||||
|
Choose from 1, 2, 3 [1]: 1
|
||||||
|
Select mail_service:
|
||||||
|
1 - Mailgun
|
||||||
|
2 - Amazon SES
|
||||||
|
3 - Mailjet
|
||||||
|
4 - Mandrill
|
||||||
|
5 - Postmark
|
||||||
|
6 - Sendgrid
|
||||||
|
7 - SendinBlue
|
||||||
|
8 - SparkPost
|
||||||
|
9 - Other SMTP
|
||||||
|
Choose from 1, 2, 3, 4, 5, 6, 7, 8, 9 [1]: 1
|
||||||
|
use_async [n]: n
|
||||||
|
use_drf [n]: y
|
||||||
|
Select frontend_pipeline:
|
||||||
|
1 - None
|
||||||
|
2 - Django Compressor
|
||||||
|
3 - Gulp
|
||||||
|
4 - Webpack
|
||||||
|
Choose from 1, 2, 3, 4 [1]: 1
|
||||||
|
use_celery [n]: y
|
||||||
|
use_mailhog [n]: n
|
||||||
|
use_sentry [n]: y
|
||||||
|
use_whitenoise [n]: n
|
||||||
|
use_heroku [n]: y
|
||||||
|
Select ci_tool:
|
||||||
|
1 - None
|
||||||
|
2 - Travis
|
||||||
|
3 - Gitlab
|
||||||
|
4 - Github
|
||||||
|
Choose from 1, 2, 3, 4 [1]: 4
|
||||||
|
keep_local_envs_in_vcs [y]: y
|
||||||
|
debug [n]: n
|
||||||
|
|
||||||
|
Enter the project and take a look around:
|
||||||
|
|
||||||
|
$ cd reddit/
|
||||||
|
$ ls
|
||||||
|
|
||||||
|
Create a git repo and push it there:
|
||||||
|
|
||||||
|
$ git init
|
||||||
|
$ git add .
|
||||||
|
$ git commit -m "first awesome commit"
|
||||||
|
$ git remote add origin git@github.com:pydanny/redditclone.git
|
||||||
|
$ git push -u origin master
|
||||||
|
|
||||||
|
Now take a look at your repo. Don't forget to carefully look at the generated README. Awesome, right?
|
||||||
|
|
||||||
|
For local development, see the following:
|
||||||
|
|
||||||
|
- [Developing locally](http://cookiecutter-django.readthedocs.io/en/latest/developing-locally.html)
|
||||||
|
- [Developing locally using docker](http://cookiecutter-django.readthedocs.io/en/latest/developing-locally-docker.html)
|
||||||
|
|
||||||
|
## Community
|
||||||
|
|
||||||
|
- Have questions? **Before you ask questions anywhere else**, please post your question on [Stack Overflow](http://stackoverflow.com/questions/tagged/cookiecutter-django) under the *cookiecutter-django* tag. We check there periodically for questions.
|
||||||
|
- If you think you found a bug or want to request a feature, please open an [issue](https://github.com/cookiecutter/cookiecutter-django/issues).
|
||||||
|
- For anything else, you can chat with us on [Discord](https://discord.gg/uFXweDQc5a).
|
||||||
|
|
||||||
|
## For Readers of Two Scoops of Django
|
||||||
|
|
||||||
|
You may notice that some elements of this project do not exactly match what we describe in chapter 3. The reason for that is this project, amongst other things, serves as a test bed for trying out new ideas and concepts. Sometimes they work, sometimes they don't, but the end result is that it won't necessarily match precisely what is described in the book I co-authored.
|
||||||
|
|
||||||
|
## For PyUp Users
|
||||||
|
|
||||||
|
If you are using [PyUp](https://pyup.io) to keep your dependencies updated and secure, use the code *cookiecutter* during checkout to get 15% off every month.
|
||||||
|
|
||||||
|
## "Your Stuff"
|
||||||
|
|
||||||
|
Scattered throughout the Python and HTML of this project are places marked with "your stuff". This is where third-party libraries are to be integrated with your project.
|
||||||
|
|
||||||
|
## For MySQL users
|
||||||
|
To get full MySQL support in addition to the default Postgresql, you can use this fork of the cookiecutter-django:
|
||||||
|
https://github.com/mabdullahadeel/cookiecutter-django-mysql
|
||||||
|
|
||||||
|
## Releases
|
||||||
|
|
||||||
|
Need a stable release? You can find them at <https://github.com/cookiecutter/cookiecutter-django/releases>
|
||||||
|
|
||||||
|
## Not Exactly What You Want?
|
||||||
|
|
||||||
|
This is what I want. *It might not be what you want.* Don't worry, you have options:
|
||||||
|
|
||||||
|
### Fork This
|
||||||
|
|
||||||
|
If you have differences in your preferred setup, I encourage you to fork this to create your own version.
|
||||||
|
Once you have your fork working, let me know and I'll add it to a '*Similar Cookiecutter Templates*' list here.
|
||||||
|
It's up to you whether to rename your fork.
|
||||||
|
|
||||||
|
If you do rename your fork, I encourage you to submit it to the following places:
|
||||||
|
|
||||||
|
- [cookiecutter](https://github.com/cookiecutter/cookiecutter) so it gets listed in the README as a template.
|
||||||
|
- The cookiecutter [grid](https://www.djangopackages.com/grids/g/cookiecutters/) on Django Packages.
|
||||||
|
|
||||||
|
### Submit a Pull Request
|
||||||
|
|
||||||
|
We accept pull requests if they're small, atomic, and make our own project development
|
||||||
|
experience better.
|
||||||
|
|
||||||
|
## Articles
|
||||||
|
|
||||||
|
- [Cookiecutter Django With Amazon RDS](https://haseeburrehman.com/posts/cookiecutter-django-with-amazon-rds/) - Apr, 2, 2021
|
||||||
|
- [Using cookiecutter-django with Google Cloud Storage](https://ahhda.github.io/cloud/gce/django/2019/03/12/using-django-cookiecutter-cloud-storage.html) - Mar. 12, 2019
|
||||||
|
- [cookiecutter-django with Nginx, Route 53 and ELB](https://msaizar.com/blog/cookiecutter-django-nginx-route-53-and-elb/) - Feb. 12, 2018
|
||||||
|
- [cookiecutter-django and Amazon RDS](https://msaizar.com/blog/cookiecutter-django-and-amazon-rds/) - Feb. 7, 2018
|
||||||
|
- [Using Cookiecutter to Jumpstart a Django Project on Windows with PyCharm](https://joshuahunter.com/posts/using-cookiecutter-to-jumpstart-a-django-project-on-windows-with-pycharm/) - May 19, 2017
|
||||||
|
- [Exploring with Cookiecutter](http://www.snowboardingcoder.com/django/2016/12/03/exploring-with-cookiecutter/) - Dec. 3, 2016
|
||||||
|
- [Introduction to Cookiecutter-Django](http://krzysztofzuraw.com/blog/2016/django-cookiecutter.html) - Feb. 19, 2016
|
||||||
|
- [Django and GitLab - Running Continuous Integration and tests with your FREE account](http://dezoito.github.io/2016/05/11/django-gitlab-continuous-integration-phantomjs.html) - May. 11, 2016
|
||||||
|
- [Development and Deployment of Cookiecutter-Django on Fedora](https://realpython.com/blog/python/development-and-deployment-of-cookiecutter-django-on-fedora/) - Jan. 18, 2016
|
||||||
|
- [Development and Deployment of Cookiecutter-Django via Docker](https://realpython.com/blog/python/development-and-deployment-of-cookiecutter-django-via-docker/) - Dec. 29, 2015
|
||||||
|
- [How to create a Django Application using Cookiecutter and Django 1.8](https://www.swapps.io/blog/how-to-create-a-django-application-using-cookiecutter-and-django-1-8/) - Sept. 12, 2015
|
||||||
|
|
||||||
|
Have a blog or online publication? Write about your cookiecutter-django tips and tricks, then send us a pull request with the link.
|
322
README.rst
322
README.rst
|
@ -1,322 +0,0 @@
|
||||||
Cookiecutter Django
|
|
||||||
=======================
|
|
||||||
|
|
||||||
.. image:: https://travis-ci.org/pydanny/cookiecutter-django.svg?branch=master
|
|
||||||
:target: https://travis-ci.org/pydanny/cookiecutter-django?branch=master
|
|
||||||
:alt: Build Status
|
|
||||||
|
|
||||||
.. image:: https://pyup.io/repos/github/pydanny/cookiecutter-django/shield.svg
|
|
||||||
:target: https://pyup.io/repos/github/pydanny/cookiecutter-django/
|
|
||||||
:alt: Updates
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/badge/cookiecutter-Join%20on%20Slack-green?style=flat&logo=slack
|
|
||||||
:target: https://join.slack.com/t/cookie-cutter/shared_invite/enQtNzI0Mzg5NjE5Nzk5LTRlYWI2YTZhYmQ4YmU1Y2Q2NmE1ZjkwOGM0NDQyNTIwY2M4ZTgyNDVkNjMxMDdhZGI5ZGE5YmJjM2M3ODJlY2U
|
|
||||||
|
|
||||||
.. image:: https://www.codetriage.com/pydanny/cookiecutter-django/badges/users.svg
|
|
||||||
:target: https://www.codetriage.com/pydanny/cookiecutter-django
|
|
||||||
:alt: Code Helpers Badge
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
|
|
||||||
:target: https://github.com/ambv/black
|
|
||||||
:alt: Code style: black
|
|
||||||
|
|
||||||
Powered by Cookiecutter_, Cookiecutter Django is a framework for jumpstarting
|
|
||||||
production-ready Django projects quickly.
|
|
||||||
|
|
||||||
* Documentation: https://cookiecutter-django.readthedocs.io/en/latest/
|
|
||||||
* See Troubleshooting_ for common errors and obstacles
|
|
||||||
* If you have problems with Cookiecutter Django, please open issues_; don't send
|
|
||||||
emails to the maintainers.
|
|
||||||
|
|
||||||
.. _Troubleshooting: https://cookiecutter-django.readthedocs.io/en/latest/troubleshooting.html
|
|
||||||
|
|
||||||
.. _528: https://github.com/pydanny/cookiecutter-django/issues/528#issuecomment-212650373
|
|
||||||
.. _issues: https://github.com/pydanny/cookiecutter-django/issues/new
|
|
||||||
|
|
||||||
Features
|
|
||||||
---------
|
|
||||||
|
|
||||||
* For Django 3.0
|
|
||||||
* Works with Python 3.8
|
|
||||||
* Renders Django projects with 100% starting test coverage
|
|
||||||
* Twitter Bootstrap_ v4 (`maintained Foundation fork`_ also available)
|
|
||||||
* 12-Factor_ based settings via django-environ_
|
|
||||||
* Secure by default. We believe in SSL.
|
|
||||||
* Optimized development and production settings
|
|
||||||
* Registration via django-allauth_
|
|
||||||
* Comes with custom user model ready to go
|
|
||||||
* Optional basic ASGI setup for Websockets
|
|
||||||
* Optional custom static build using Gulp and livereload
|
|
||||||
* Send emails via Anymail_ (using Mailgun_ by default or Amazon SES if AWS is selected cloud provider, but switchable)
|
|
||||||
* Media storage using Amazon S3 or Google Cloud Storage
|
|
||||||
* Docker support using docker-compose_ for development and production (using Traefik_ with LetsEncrypt_ support)
|
|
||||||
* Procfile_ for deploying to Heroku
|
|
||||||
* Instructions for deploying to PythonAnywhere_
|
|
||||||
* Run tests with unittest or pytest
|
|
||||||
* Customizable PostgreSQL version
|
|
||||||
* Default integration with pre-commit_ for identifying simple issues before submission to code review
|
|
||||||
|
|
||||||
.. _`maintained Foundation fork`: https://github.com/Parbhat/cookiecutter-django-foundation
|
|
||||||
|
|
||||||
|
|
||||||
Optional Integrations
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
*These features can be enabled during initial project setup.*
|
|
||||||
|
|
||||||
* Serve static files from Amazon S3, Google Cloud Storage or Whitenoise_
|
|
||||||
* Configuration for Celery_ and Flower_ (the latter in Docker setup only)
|
|
||||||
* Integration with MailHog_ for local email testing
|
|
||||||
* Integration with Sentry_ for error logging
|
|
||||||
|
|
||||||
.. _Bootstrap: https://github.com/twbs/bootstrap
|
|
||||||
.. _django-environ: https://github.com/joke2k/django-environ
|
|
||||||
.. _12-Factor: http://12factor.net/
|
|
||||||
.. _django-allauth: https://github.com/pennersr/django-allauth
|
|
||||||
.. _django-avatar: https://github.com/grantmcconnaughey/django-avatar
|
|
||||||
.. _Procfile: https://devcenter.heroku.com/articles/procfile
|
|
||||||
.. _Mailgun: http://www.mailgun.com/
|
|
||||||
.. _Whitenoise: https://whitenoise.readthedocs.io/
|
|
||||||
.. _Celery: http://www.celeryproject.org/
|
|
||||||
.. _Flower: https://github.com/mher/flower
|
|
||||||
.. _Anymail: https://github.com/anymail/django-anymail
|
|
||||||
.. _MailHog: https://github.com/mailhog/MailHog
|
|
||||||
.. _Sentry: https://sentry.io/welcome/
|
|
||||||
.. _docker-compose: https://github.com/docker/compose
|
|
||||||
.. _PythonAnywhere: https://www.pythonanywhere.com/
|
|
||||||
.. _Traefik: https://traefik.io/
|
|
||||||
.. _LetsEncrypt: https://letsencrypt.org/
|
|
||||||
.. _pre-commit: https://github.com/pre-commit/pre-commit
|
|
||||||
|
|
||||||
Constraints
|
|
||||||
-----------
|
|
||||||
|
|
||||||
* Only maintained 3rd party libraries are used.
|
|
||||||
* Uses PostgreSQL everywhere (9.4 - 11.3)
|
|
||||||
* Environment variables for configuration (This won't work with Apache/mod_wsgi).
|
|
||||||
|
|
||||||
Support this Project!
|
|
||||||
----------------------
|
|
||||||
|
|
||||||
This project is run by volunteers. Please support them in their efforts to maintain and improve Cookiecutter Django:
|
|
||||||
|
|
||||||
* Daniel Roy Greenfeld, Project Lead (`GitHub <https://github.com/pydanny>`_, `Patreon <https://www.patreon.com/danielroygreenfeld>`_): expertise in Django and AWS ELB.
|
|
||||||
|
|
||||||
* Nikita Shupeyko, Core Developer (`GitHub <https://github.com/webyneter>`_): expertise in Python/Django, hands-on DevOps and frontend experience.
|
|
||||||
|
|
||||||
Projects that provide financial support to the maintainers:
|
|
||||||
|
|
||||||
Django Crash Course
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
.. image:: https://cdn.shopify.com/s/files/1/0304/6901/files/Django-Crash-Course-300x436.jpg
|
|
||||||
:name: Django Crash Course: Covers Django 3.0 and Python 3.8
|
|
||||||
:align: center
|
|
||||||
:alt: Django Crash Course
|
|
||||||
:target: https://www.roygreenfeld.com/products/django-crash-course
|
|
||||||
|
|
||||||
Django Crash Course for Django 3.0 and Python 3.8 is the best cheese-themed Django reference in the universe!
|
|
||||||
|
|
||||||
pyup
|
|
||||||
~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
.. image:: https://pyup.io/static/images/logo.png
|
|
||||||
:name: pyup
|
|
||||||
:align: center
|
|
||||||
:alt: pyup
|
|
||||||
:target: https://pyup.io/
|
|
||||||
|
|
||||||
Pyup brings you automated security and dependency updates used by Google and other organizations. Free for open source projects!
|
|
||||||
|
|
||||||
Usage
|
|
||||||
------
|
|
||||||
|
|
||||||
Let's pretend you want to create a Django project called "redditclone". Rather than using ``startproject``
|
|
||||||
and then editing the results to include your name, email, and various configuration issues that always get forgotten until the worst possible moment, get cookiecutter_ to do all the work.
|
|
||||||
|
|
||||||
First, get Cookiecutter. Trust me, it's awesome::
|
|
||||||
|
|
||||||
$ pip install "cookiecutter>=1.7.0"
|
|
||||||
|
|
||||||
Now run it against this repo::
|
|
||||||
|
|
||||||
$ cookiecutter https://github.com/pydanny/cookiecutter-django
|
|
||||||
|
|
||||||
You'll be prompted for some values. Provide them, then a Django project will be created for you.
|
|
||||||
|
|
||||||
**Warning**: After this point, change 'Daniel Greenfeld', 'pydanny', etc to your own information.
|
|
||||||
|
|
||||||
Answer the prompts with your own desired options_. For example::
|
|
||||||
|
|
||||||
Cloning into 'cookiecutter-django'...
|
|
||||||
remote: Counting objects: 550, done.
|
|
||||||
remote: Compressing objects: 100% (310/310), done.
|
|
||||||
remote: Total 550 (delta 283), reused 479 (delta 222)
|
|
||||||
Receiving objects: 100% (550/550), 127.66 KiB | 58 KiB/s, done.
|
|
||||||
Resolving deltas: 100% (283/283), done.
|
|
||||||
project_name [Project Name]: Reddit Clone
|
|
||||||
project_slug [reddit_clone]: reddit
|
|
||||||
author_name [Daniel Roy Greenfeld]: Daniel Greenfeld
|
|
||||||
email [you@example.com]: pydanny@gmail.com
|
|
||||||
description [Behold My Awesome Project!]: A reddit clone.
|
|
||||||
domain_name [example.com]: myreddit.com
|
|
||||||
version [0.1.0]: 0.0.1
|
|
||||||
timezone [UTC]: America/Los_Angeles
|
|
||||||
use_whitenoise [n]: n
|
|
||||||
use_celery [n]: y
|
|
||||||
use_mailhog [n]: n
|
|
||||||
use_sentry [n]: y
|
|
||||||
use_pycharm [n]: y
|
|
||||||
windows [n]: n
|
|
||||||
use_docker [n]: n
|
|
||||||
use_heroku [n]: y
|
|
||||||
use_compressor [n]: y
|
|
||||||
Select postgresql_version:
|
|
||||||
1 - 11.3
|
|
||||||
2 - 10.8
|
|
||||||
3 - 9.6
|
|
||||||
4 - 9.5
|
|
||||||
5 - 9.4
|
|
||||||
Choose from 1, 2, 3, 4, 5 [1]: 1
|
|
||||||
Select js_task_runner:
|
|
||||||
1 - None
|
|
||||||
2 - Gulp
|
|
||||||
Choose from 1, 2 [1]: 1
|
|
||||||
Select cloud_provider:
|
|
||||||
1 - AWS
|
|
||||||
2 - GCP
|
|
||||||
3 - None
|
|
||||||
Choose from 1, 2, 3 [1]: 1
|
|
||||||
custom_bootstrap_compilation [n]: n
|
|
||||||
Select open_source_license:
|
|
||||||
1 - MIT
|
|
||||||
2 - BSD
|
|
||||||
3 - GPLv3
|
|
||||||
4 - Apache Software License 2.0
|
|
||||||
5 - Not open source
|
|
||||||
Choose from 1, 2, 3, 4, 5 [1]: 1
|
|
||||||
keep_local_envs_in_vcs [y]: y
|
|
||||||
debug [n]: n
|
|
||||||
|
|
||||||
Enter the project and take a look around::
|
|
||||||
|
|
||||||
$ cd reddit/
|
|
||||||
$ ls
|
|
||||||
|
|
||||||
Create a git repo and push it there::
|
|
||||||
|
|
||||||
$ git init
|
|
||||||
$ git add .
|
|
||||||
$ git commit -m "first awesome commit"
|
|
||||||
$ git remote add origin git@github.com:pydanny/redditclone.git
|
|
||||||
$ git push -u origin master
|
|
||||||
|
|
||||||
Now take a look at your repo. Don't forget to carefully look at the generated README. Awesome, right?
|
|
||||||
|
|
||||||
For local development, see the following:
|
|
||||||
|
|
||||||
* `Developing locally`_
|
|
||||||
* `Developing locally using docker`_
|
|
||||||
|
|
||||||
.. _options: http://cookiecutter-django.readthedocs.io/en/latest/project-generation-options.html
|
|
||||||
.. _`Developing locally`: http://cookiecutter-django.readthedocs.io/en/latest/developing-locally.html
|
|
||||||
.. _`Developing locally using docker`: http://cookiecutter-django.readthedocs.io/en/latest/developing-locally-docker.html
|
|
||||||
|
|
||||||
Community
|
|
||||||
-----------
|
|
||||||
|
|
||||||
* Have questions? **Before you ask questions anywhere else**, please post your question on `Stack Overflow`_ under the *cookiecutter-django* tag. We check there periodically for questions.
|
|
||||||
* If you think you found a bug or want to request a feature, please open an issue_.
|
|
||||||
* For anything else, you can chat with us on `Slack`_.
|
|
||||||
|
|
||||||
.. _`Stack Overflow`: http://stackoverflow.com/questions/tagged/cookiecutter-django
|
|
||||||
.. _`issue`: https://github.com/pydanny/cookiecutter-django/issues
|
|
||||||
.. _`Slack`: https://join.slack.com/t/cookie-cutter/shared_invite/enQtNzI0Mzg5NjE5Nzk5LTRlYWI2YTZhYmQ4YmU1Y2Q2NmE1ZjkwOGM0NDQyNTIwY2M4ZTgyNDVkNjMxMDdhZGI5ZGE5YmJjM2M3ODJlY2U
|
|
||||||
|
|
||||||
For Readers of Two Scoops of Django
|
|
||||||
--------------------------------------------
|
|
||||||
|
|
||||||
You may notice that some elements of this project do not exactly match what we describe in chapter 3. The reason for that is this project, amongst other things, serves as a test bed for trying out new ideas and concepts. Sometimes they work, sometimes they don't, but the end result is that it won't necessarily match precisely what is described in the book I co-authored.
|
|
||||||
|
|
||||||
For pyup.io Users
|
|
||||||
-----------------
|
|
||||||
|
|
||||||
If you are using `pyup.io`_ to keep your dependencies updated and secure, use the code *cookiecutter* during checkout to get 15% off every month.
|
|
||||||
|
|
||||||
.. _`pyup.io`: https://pyup.io
|
|
||||||
|
|
||||||
"Your Stuff"
|
|
||||||
-------------
|
|
||||||
|
|
||||||
Scattered throughout the Python and HTML of this project are places marked with "your stuff". This is where third-party libraries are to be integrated with your project.
|
|
||||||
|
|
||||||
Releases
|
|
||||||
--------
|
|
||||||
|
|
||||||
Need a stable release? You can find them at https://github.com/pydanny/cookiecutter-django/releases
|
|
||||||
|
|
||||||
|
|
||||||
Not Exactly What You Want?
|
|
||||||
---------------------------
|
|
||||||
|
|
||||||
This is what I want. *It might not be what you want.* Don't worry, you have options:
|
|
||||||
|
|
||||||
Fork This
|
|
||||||
~~~~~~~~~~
|
|
||||||
|
|
||||||
If you have differences in your preferred setup, I encourage you to fork this to create your own version.
|
|
||||||
Once you have your fork working, let me know and I'll add it to a '*Similar Cookiecutter Templates*' list here.
|
|
||||||
It's up to you whether or not to rename your fork.
|
|
||||||
|
|
||||||
If you do rename your fork, I encourage you to submit it to the following places:
|
|
||||||
|
|
||||||
* cookiecutter_ so it gets listed in the README as a template.
|
|
||||||
* The cookiecutter grid_ on Django Packages.
|
|
||||||
|
|
||||||
.. _cookiecutter: https://github.com/cookiecutter/cookiecutter
|
|
||||||
.. _grid: https://www.djangopackages.com/grids/g/cookiecutters/
|
|
||||||
|
|
||||||
Submit a Pull Request
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
We accept pull requests if they're small, atomic, and make our own project development
|
|
||||||
experience better.
|
|
||||||
|
|
||||||
Articles
|
|
||||||
---------
|
|
||||||
|
|
||||||
* `Complete Walkthrough: Blue/Green Deployment to AWS ECS using GitHub actions`_ - June 10, 2020
|
|
||||||
* `Using cookiecutter-django with Google Cloud Storage`_ - Mar. 12, 2019
|
|
||||||
* `cookiecutter-django with Nginx, Route 53 and ELB`_ - Feb. 12, 2018
|
|
||||||
* `cookiecutter-django and Amazon RDS`_ - Feb. 7, 2018
|
|
||||||
* `Using Cookiecutter to Jumpstart a Django Project on Windows with PyCharm`_ - May 19, 2017
|
|
||||||
* `Exploring with Cookiecutter`_ - Dec. 3, 2016
|
|
||||||
* `Introduction to Cookiecutter-Django`_ - Feb. 19, 2016
|
|
||||||
* `Django and GitLab - Running Continuous Integration and tests with your FREE account`_ - May. 11, 2016
|
|
||||||
* `Development and Deployment of Cookiecutter-Django on Fedora`_ - Jan. 18, 2016
|
|
||||||
* `Development and Deployment of Cookiecutter-Django via Docker`_ - Dec. 29, 2015
|
|
||||||
* `How to create a Django Application using Cookiecutter and Django 1.8`_ - Sept. 12, 2015
|
|
||||||
|
|
||||||
Have a blog or online publication? Write about your cookiecutter-django tips and tricks, then send us a pull request with the link.
|
|
||||||
|
|
||||||
.. _`Complete Walkthrough: Blue/Green Deployment to AWS ECS using GitHub actions`: https://github.com/Andrew-Chen-Wang/cookiecutter-django-ecs-github
|
|
||||||
.. _`Using cookiecutter-django with Google Cloud Storage`: https://ahhda.github.io/cloud/gce/django/2019/03/12/using-django-cookiecutter-cloud-storage.html
|
|
||||||
.. _`cookiecutter-django with Nginx, Route 53 and ELB`: https://msaizar.com/blog/cookiecutter-django-nginx-route-53-and-elb/
|
|
||||||
.. _`cookiecutter-django and Amazon RDS`: https://msaizar.com/blog/cookiecutter-django-and-amazon-rds/
|
|
||||||
.. _`Exploring with Cookiecutter`: http://www.snowboardingcoder.com/django/2016/12/03/exploring-with-cookiecutter/
|
|
||||||
.. _`Using Cookiecutter to Jumpstart a Django Project on Windows with PyCharm`: https://joshuahunter.com/posts/using-cookiecutter-to-jumpstart-a-django-project-on-windows-with-pycharm/
|
|
||||||
|
|
||||||
.. _`Development and Deployment of Cookiecutter-Django via Docker`: https://realpython.com/blog/python/development-and-deployment-of-cookiecutter-django-via-docker/
|
|
||||||
.. _`Development and Deployment of Cookiecutter-Django on Fedora`: https://realpython.com/blog/python/development-and-deployment-of-cookiecutter-django-on-fedora/
|
|
||||||
.. _`How to create a Django Application using Cookiecutter and Django 1.8`: https://www.swapps.io/blog/how-to-create-a-django-application-using-cookiecutter-and-django-1-8/
|
|
||||||
.. _`Introduction to Cookiecutter-Django`: http://krzysztofzuraw.com/blog/2016/django-cookiecutter.html
|
|
||||||
.. _`Django and GitLab - Running Continuous Integration and tests with your FREE account`: http://dezoito.github.io/2016/05/11/django-gitlab-continuous-integration-phantomjs.html
|
|
||||||
|
|
||||||
Code of Conduct
|
|
||||||
---------------
|
|
||||||
|
|
||||||
Everyone interacting in the Cookiecutter project's codebases, issue trackers, chat
|
|
||||||
rooms, and mailing lists is expected to follow the `PyPA Code of Conduct`_.
|
|
||||||
|
|
||||||
|
|
||||||
.. _`PyPA Code of Conduct`: https://www.pypa.io/en/latest/code-of-conduct/
|
|
|
@ -18,19 +18,16 @@
|
||||||
"use_pycharm": "n",
|
"use_pycharm": "n",
|
||||||
"use_docker": "n",
|
"use_docker": "n",
|
||||||
"postgresql_version": [
|
"postgresql_version": [
|
||||||
"11.3",
|
"14",
|
||||||
"10.8",
|
"13",
|
||||||
"9.6",
|
"12",
|
||||||
"9.5",
|
"11",
|
||||||
"9.4"
|
"10"
|
||||||
],
|
|
||||||
"js_task_runner": [
|
|
||||||
"None",
|
|
||||||
"Gulp"
|
|
||||||
],
|
],
|
||||||
"cloud_provider": [
|
"cloud_provider": [
|
||||||
"AWS",
|
"AWS",
|
||||||
"GCP",
|
"GCP",
|
||||||
|
"Azure",
|
||||||
"None"
|
"None"
|
||||||
],
|
],
|
||||||
"mail_service": [
|
"mail_service": [
|
||||||
|
@ -46,8 +43,12 @@
|
||||||
],
|
],
|
||||||
"use_async": "n",
|
"use_async": "n",
|
||||||
"use_drf": "n",
|
"use_drf": "n",
|
||||||
"custom_bootstrap_compilation": "n",
|
"frontend_pipeline": [
|
||||||
"use_compressor": "n",
|
"None",
|
||||||
|
"Django Compressor",
|
||||||
|
"Gulp",
|
||||||
|
"Webpack"
|
||||||
|
],
|
||||||
"use_celery": "n",
|
"use_celery": "n",
|
||||||
"use_mailhog": "n",
|
"use_mailhog": "n",
|
||||||
"use_sentry": "n",
|
"use_sentry": "n",
|
||||||
|
@ -56,9 +57,9 @@
|
||||||
"ci_tool": [
|
"ci_tool": [
|
||||||
"None",
|
"None",
|
||||||
"Travis",
|
"Travis",
|
||||||
"Gitlab"
|
"Gitlab",
|
||||||
|
"Github"
|
||||||
],
|
],
|
||||||
"keep_local_envs_in_vcs": "y",
|
"keep_local_envs_in_vcs": "y",
|
||||||
|
|
||||||
"debug": "n"
|
"debug": "n"
|
||||||
}
|
}
|
||||||
|
|
10
docs/conf.py
10
docs/conf.py
|
@ -7,10 +7,7 @@
|
||||||
#
|
#
|
||||||
# All configuration values have a default; values that are commented out
|
# All configuration values have a default; values that are commented out
|
||||||
# serve to show the default.
|
# serve to show the default.
|
||||||
|
|
||||||
from datetime import datetime
|
from datetime import datetime
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
now = datetime.now()
|
now = datetime.now()
|
||||||
|
|
||||||
|
@ -42,7 +39,7 @@ master_doc = "index"
|
||||||
|
|
||||||
# General information about the project.
|
# General information about the project.
|
||||||
project = "Cookiecutter Django"
|
project = "Cookiecutter Django"
|
||||||
copyright = "2013-{}, Daniel Roy Greenfeld".format(now.year)
|
copyright = f"2013-{now.year}, Daniel Roy Greenfeld"
|
||||||
|
|
||||||
# The version info for the project you're documenting, acts as replacement for
|
# The version info for the project you're documenting, acts as replacement for
|
||||||
# |version| and |release|, also used in various other places throughout the
|
# |version| and |release|, also used in various other places throughout the
|
||||||
|
@ -92,7 +89,7 @@ pygments_style = "sphinx"
|
||||||
|
|
||||||
# The theme to use for HTML and HTML Help pages. See the documentation for
|
# The theme to use for HTML and HTML Help pages. See the documentation for
|
||||||
# a list of builtin themes.
|
# a list of builtin themes.
|
||||||
html_theme = "default"
|
html_theme = "sphinx_rtd_theme"
|
||||||
|
|
||||||
# Theme options are theme-specific and customize the look and feel of a theme
|
# Theme options are theme-specific and customize the look and feel of a theme
|
||||||
# further. For a list of options available for each theme, see the
|
# further. For a list of options available for each theme, see the
|
||||||
|
@ -242,7 +239,8 @@ texinfo_documents = [
|
||||||
"Cookiecutter Django documentation",
|
"Cookiecutter Django documentation",
|
||||||
"Daniel Roy Greenfeld",
|
"Daniel Roy Greenfeld",
|
||||||
"Cookiecutter Django",
|
"Cookiecutter Django",
|
||||||
"A Cookiecutter template for creating production-ready Django projects quickly.",
|
"A Cookiecutter template for creating production-ready "
|
||||||
|
"Django projects quickly.",
|
||||||
"Miscellaneous",
|
"Miscellaneous",
|
||||||
)
|
)
|
||||||
]
|
]
|
||||||
|
|
|
@ -3,23 +3,24 @@ Deployment on Heroku
|
||||||
|
|
||||||
.. index:: Heroku
|
.. index:: Heroku
|
||||||
|
|
||||||
Commands to run
|
Script
|
||||||
---------------
|
------
|
||||||
|
|
||||||
Run these commands to deploy the project to Heroku:
|
Run these commands to deploy the project to Heroku:
|
||||||
|
|
||||||
.. code-block:: bash
|
.. code-block:: bash
|
||||||
|
|
||||||
heroku create --buildpack https://github.com/heroku/heroku-buildpack-python
|
heroku create --buildpack heroku/python
|
||||||
|
|
||||||
heroku addons:create heroku-postgresql:hobby-dev
|
heroku addons:create heroku-postgresql:mini
|
||||||
# On Windows use double quotes for the time zone, e.g.
|
# On Windows use double quotes for the time zone, e.g.
|
||||||
# heroku pg:backups schedule --at "02:00 America/Los_Angeles" DATABASE_URL
|
# heroku pg:backups schedule --at "02:00 America/Los_Angeles" DATABASE_URL
|
||||||
heroku pg:backups schedule --at '02:00 America/Los_Angeles' DATABASE_URL
|
heroku pg:backups schedule --at '02:00 America/Los_Angeles' DATABASE_URL
|
||||||
heroku pg:promote DATABASE_URL
|
heroku pg:promote DATABASE_URL
|
||||||
|
|
||||||
heroku addons:create heroku-redis:hobby-dev
|
heroku addons:create heroku-redis:mini
|
||||||
|
|
||||||
|
# Assuming you chose Mailgun as mail service (see below for others)
|
||||||
heroku addons:create mailgun:starter
|
heroku addons:create mailgun:starter
|
||||||
|
|
||||||
heroku config:set PYTHONHASHSEED=random
|
heroku config:set PYTHONHASHSEED=random
|
||||||
|
@ -53,11 +54,25 @@ Run these commands to deploy the project to Heroku:
|
||||||
|
|
||||||
heroku open
|
heroku open
|
||||||
|
|
||||||
|
Notes
|
||||||
|
-----
|
||||||
|
|
||||||
|
Email Service
|
||||||
|
+++++++++++++
|
||||||
|
|
||||||
|
The script above assumes that you've chosen Mailgun as email service. If you want to use another one, check the `documentation for django-anymail <https://anymail.readthedocs.io>`_ to know which environment variables to set. Heroku provides other `add-ons for emails <https://elements.heroku.com/addons#email-sms>`_ (e.g. Sendgrid) which can be configured with a similar one-line command.
|
||||||
|
|
||||||
.. warning::
|
.. warning::
|
||||||
|
|
||||||
.. include:: mailgun.rst
|
.. include:: mailgun.rst
|
||||||
|
|
||||||
|
Heroku & Docker
|
||||||
|
+++++++++++++++
|
||||||
|
|
||||||
|
Although Heroku has some sort of `Docker support`_, it's not supported by cookiecutter-django.
|
||||||
|
We invite you to follow Heroku documentation about it.
|
||||||
|
|
||||||
|
.. _Docker support: https://devcenter.heroku.com/articles/build-docker-images-heroku-yml
|
||||||
|
|
||||||
Optional actions
|
Optional actions
|
||||||
----------------
|
----------------
|
||||||
|
@ -94,10 +109,10 @@ Or add the DSN for your account, if you already have one:
|
||||||
.. _Sentry add-on: https://elements.heroku.com/addons/sentry
|
.. _Sentry add-on: https://elements.heroku.com/addons/sentry
|
||||||
|
|
||||||
|
|
||||||
Gulp & Bootstrap compilation
|
Gulp or Webpack
|
||||||
++++++++++++++++++++++++++++
|
+++++++++++++++
|
||||||
|
|
||||||
If you've opted for a custom bootstrap build, you'll most likely need to setup
|
If you've opted for Gulp or Webpack as frontend pipeline, you'll most likely need to setup
|
||||||
your app to use `multiple buildpacks`_: one for Python & one for Node.js:
|
your app to use `multiple buildpacks`_: one for Python & one for Node.js:
|
||||||
|
|
||||||
.. code-block:: bash
|
.. code-block:: bash
|
||||||
|
@ -106,16 +121,8 @@ your app to use `multiple buildpacks`_: one for Python & one for Node.js:
|
||||||
|
|
||||||
At time of writing, this should do the trick: during deployment,
|
At time of writing, this should do the trick: during deployment,
|
||||||
the Heroku should run ``npm install`` and then ``npm build``,
|
the Heroku should run ``npm install`` and then ``npm build``,
|
||||||
which runs Gulp in cookiecutter-django.
|
which run the SASS compilation & JS bundling.
|
||||||
|
|
||||||
If things don't work, please refer to the Heroku docs.
|
If things don't work, please refer to the Heroku docs.
|
||||||
|
|
||||||
.. _multiple buildpacks: https://devcenter.heroku.com/articles/using-multiple-buildpacks-for-an-app
|
.. _multiple buildpacks: https://devcenter.heroku.com/articles/using-multiple-buildpacks-for-an-app
|
||||||
|
|
||||||
About Heroku & Docker
|
|
||||||
---------------------
|
|
||||||
|
|
||||||
Although Heroku has some sort of `Docker support`_, it's not supported by cookiecutter-django.
|
|
||||||
We invite you to follow Heroku documentation about it.
|
|
||||||
|
|
||||||
.. _Docker support: https://devcenter.heroku.com/articles/build-docker-images-heroku-yml
|
|
||||||
|
|
|
@ -15,7 +15,7 @@ Full instructions follow, but here's a high-level view.
|
||||||
|
|
||||||
2. Set your config variables in the *postactivate* script
|
2. Set your config variables in the *postactivate* script
|
||||||
|
|
||||||
3. Run the *manage.py* ``migrate`` and ``collectstatic`` {%- if cookiecutter.use_compressor == "y" %}and ``compress`` {%- endif %}commands
|
3. Run the *manage.py* ``migrate`` and ``collectstatic`` commands. If you've opted for django-compressor, also run ``compress``
|
||||||
|
|
||||||
4. Add an entry to the PythonAnywhere *Web tab*
|
4. Add an entry to the PythonAnywhere *Web tab*
|
||||||
|
|
||||||
|
@ -25,7 +25,6 @@ Full instructions follow, but here's a high-level view.
|
||||||
Once you've been through this one-off config, future deployments are much simpler: just ``git pull`` and then hit the "Reload" button :)
|
Once you've been through this one-off config, future deployments are much simpler: just ``git pull`` and then hit the "Reload" button :)
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Getting your code and dependencies installed on PythonAnywhere
|
Getting your code and dependencies installed on PythonAnywhere
|
||||||
--------------------------------------------------------------
|
--------------------------------------------------------------
|
||||||
|
|
||||||
|
@ -35,11 +34,10 @@ Make sure your project is fully committed and pushed up to Bitbucket or Github o
|
||||||
|
|
||||||
git clone <my-repo-url> # you can also use hg
|
git clone <my-repo-url> # you can also use hg
|
||||||
cd my-project-name
|
cd my-project-name
|
||||||
mkvirtualenv --python=/usr/bin/python3.8 my-project-name
|
mkvirtualenv --python=/usr/bin/python3.10 my-project-name
|
||||||
pip install -r requirements/production.txt # may take a few minutes
|
pip install -r requirements/production.txt # may take a few minutes
|
||||||
|
|
||||||
|
|
||||||
|
|
||||||
Setting environment variables in the console
|
Setting environment variables in the console
|
||||||
--------------------------------------------
|
--------------------------------------------
|
||||||
|
|
||||||
|
@ -57,7 +55,7 @@ Set environment variables via the virtualenv "postactivate" script (this will se
|
||||||
|
|
||||||
vi $VIRTUAL_ENV/bin/postactivate
|
vi $VIRTUAL_ENV/bin/postactivate
|
||||||
|
|
||||||
**TIP:** *If you don't like vi, you can also edit this file via the PythonAnywhere "Files" menu; look in the ".virtualenvs" folder*.
|
.. note:: If you don't like vi, you can also edit this file via the PythonAnywhere "Files" menu; look in the ".virtualenvs" folder.
|
||||||
|
|
||||||
Add these exports
|
Add these exports
|
||||||
|
|
||||||
|
@ -73,13 +71,14 @@ Add these exports
|
||||||
export DJANGO_AWS_ACCESS_KEY_ID=
|
export DJANGO_AWS_ACCESS_KEY_ID=
|
||||||
export DJANGO_AWS_SECRET_ACCESS_KEY=
|
export DJANGO_AWS_SECRET_ACCESS_KEY=
|
||||||
export DJANGO_AWS_STORAGE_BUCKET_NAME=
|
export DJANGO_AWS_STORAGE_BUCKET_NAME=
|
||||||
export DATABASE_URL='<see below>'
|
export DATABASE_URL='<see Database setup section below>'
|
||||||
|
export REDIS_URL='<see Redis section below>'
|
||||||
|
|
||||||
**NOTE:** *The AWS details are not required if you're using whitenoise or the built-in pythonanywhere static files service, but you do need to set them to blank, as above.*
|
.. note:: The AWS details are not required if you're using whitenoise or the built-in pythonanywhere static files service, but you do need to set them to blank, as above.
|
||||||
|
|
||||||
|
|
||||||
Database setup:
|
Database setup
|
||||||
---------------
|
--------------
|
||||||
|
|
||||||
Go to the PythonAnywhere **Databases tab** and configure your database.
|
Go to the PythonAnywhere **Databases tab** and configure your database.
|
||||||
|
|
||||||
|
@ -109,18 +108,26 @@ Now run the migration, and collectstatic:
|
||||||
source $VIRTUAL_ENV/bin/postactivate
|
source $VIRTUAL_ENV/bin/postactivate
|
||||||
python manage.py migrate
|
python manage.py migrate
|
||||||
python manage.py collectstatic
|
python manage.py collectstatic
|
||||||
{%- if cookiecutter.use_compressor == "y" %}python manage.py compress {%- endif %}
|
# if using django-compressor:
|
||||||
|
python manage.py compress
|
||||||
# and, optionally
|
# and, optionally
|
||||||
python manage.py createsuperuser
|
python manage.py createsuperuser
|
||||||
|
|
||||||
|
|
||||||
|
Redis
|
||||||
|
-----
|
||||||
|
|
||||||
|
PythonAnywhere does NOT `offer a built-in solution <https://www.pythonanywhere.com/forums/topic/1666/>`_ for Redis, however the production setup from Cookiecutter Django uses Redis as cache and requires one.
|
||||||
|
|
||||||
|
We recommend signing up for a separate service offering hosted Redis (e.g. `Redislab <https://redis.com/>`_) and using the URL they provide.
|
||||||
|
|
||||||
|
|
||||||
Configure the PythonAnywhere Web Tab
|
Configure the PythonAnywhere Web Tab
|
||||||
------------------------------------
|
------------------------------------
|
||||||
|
|
||||||
Go to the PythonAnywhere **Web tab**, hit **Add new web app**, and choose **Manual Config**, and then the version of Python you used for your virtualenv.
|
Go to the PythonAnywhere **Web tab**, hit **Add new web app**, and choose **Manual Config**, and then the version of Python you used for your virtualenv.
|
||||||
|
|
||||||
**NOTE:** *If you're using a custom domain (not on \*.pythonanywhere.com), then you'll need to set up a CNAME with your domain registrar.*
|
.. note:: If you're using a custom domain (not on \*.pythonanywhere.com), then you'll need to set up a CNAME with your domain registrar.
|
||||||
|
|
||||||
When you're redirected back to the web app config screen, set the **path to your virtualenv**. If you used virtualenvwrapper as above, you can just enter its name.
|
When you're redirected back to the web app config screen, set the **path to your virtualenv**. If you used virtualenvwrapper as above, you can just enter its name.
|
||||||
|
|
||||||
|
@ -153,15 +160,14 @@ Click through to the **WSGI configuration file** link (near the top) and edit th
|
||||||
Back on the Web tab, hit **Reload**, and your app should be live!
|
Back on the Web tab, hit **Reload**, and your app should be live!
|
||||||
|
|
||||||
|
|
||||||
**NOTE:** *you may see security warnings until you set up your SSL certificates. If you
|
.. note:: You may see security warnings until you set up your SSL certificates. If you want to suppress them temporarily, set ``DJANGO_SECURE_SSL_REDIRECT`` to blank. Follow `these instructions <https://help.pythonanywhere.com/pages/HTTPSSetup>`_ to get SSL set up.
|
||||||
want to suppress them temporarily, set DJANGO_SECURE_SSL_REDIRECT to blank. Follow
|
|
||||||
the instructions here to get SSL set up: https://help.pythonanywhere.com/pages/SSLOwnDomains/*
|
|
||||||
|
|
||||||
|
|
||||||
Optional: static files
|
Optional: static files
|
||||||
----------------------
|
----------------------
|
||||||
|
|
||||||
If you want to use the PythonAnywhere static files service instead of using whitenoise or S3, you'll find its configuration section on the Web tab. Essentially you'll need an entry to match your ``STATIC_URL`` and ``STATIC_ROOT`` settings. There's more info here: https://help.pythonanywhere.com/pages/DjangoStaticFiles
|
If you want to use the PythonAnywhere static files service instead of using whitenoise or S3, you'll find its configuration section on the Web tab. Essentially you'll need an entry to match your ``STATIC_URL`` and ``STATIC_ROOT`` settings. There's more info `in this article <https://help.pythonanywhere.com/pages/DjangoStaticFiles>`_.
|
||||||
|
|
||||||
|
|
||||||
Future deployments
|
Future deployments
|
||||||
|
@ -176,8 +182,9 @@ For subsequent deployments, the procedure is much simpler. In a Bash console:
|
||||||
git pull
|
git pull
|
||||||
python manage.py migrate
|
python manage.py migrate
|
||||||
python manage.py collectstatic
|
python manage.py collectstatic
|
||||||
{%- if cookiecutter.use_compressor == "y" %}python manage.py compress {%- endif %}
|
# if using django-compressor:
|
||||||
|
python manage.py compress
|
||||||
|
|
||||||
And then go to the Web tab and hit **Reload**
|
And then go to the Web tab and hit **Reload**
|
||||||
|
|
||||||
**TIP:** *if you're really keen, you can set up git-push based deployments: https://blog.pythonanywhere.com/87/*
|
.. note:: If you're really keen, you can set up git-push based deployments: https://blog.pythonanywhere.com/87/
|
||||||
|
|
|
@ -82,8 +82,34 @@ The Traefik reverse proxy used in the default configuration will get you a valid
|
||||||
|
|
||||||
You can read more about this feature and how to configure it, at `Automatic HTTPS`_ in the Traefik docs.
|
You can read more about this feature and how to configure it, at `Automatic HTTPS`_ in the Traefik docs.
|
||||||
|
|
||||||
.. _Automatic HTTPS: https://docs.traefik.io/configuration/acme/
|
.. _Automatic HTTPS: https://docs.traefik.io/https/acme/
|
||||||
|
|
||||||
|
.. _webpack-whitenoise-limitation:
|
||||||
|
|
||||||
|
Webpack without Whitenoise limitation
|
||||||
|
-------------------------------------
|
||||||
|
|
||||||
|
If you opt for Webpack without Whitenoise, Webpack needs to know the static URL at build time, when running ``docker-compose build`` (See ``webpack/prod.config.js``). Depending on your setup, this URL may come from the following environment variables:
|
||||||
|
|
||||||
|
- ``AWS_STORAGE_BUCKET_NAME``
|
||||||
|
- ``DJANGO_AWS_S3_CUSTOM_DOMAIN``
|
||||||
|
- ``DJANGO_GCP_STORAGE_BUCKET_NAME``
|
||||||
|
- ``DJANGO_AZURE_CONTAINER_NAME``
|
||||||
|
|
||||||
|
The Django settings get these values at runtime via the ``.envs/.production/.django`` file, but Docker does not read this file at build time; it only looks for a ``.env`` file in the root of the project. Failing to pass the values correctly will result in a page without CSS styles or JavaScript.
|
||||||
|
|
||||||
|
To solve this, you can either:
|
||||||
|
|
||||||
|
1. merge all the env files into ``.env`` by running::
|
||||||
|
|
||||||
|
merge_production_dotenvs_in_dotenv.py
|
||||||
|
|
||||||
|
2. create a ``.env`` file in the root of the project with just variables you need. You'll need to also define them in ``.envs/.production/.django`` (hence duplicating them).
|
||||||
|
3. set these variables when running the build command::
|
||||||
|
|
||||||
|
DJANGO_AWS_S3_CUSTOM_DOMAIN=example.com docker-compose -f production.yml build
|
||||||
|
|
||||||
|
None of these options is ideal; we're open to suggestions on how to improve this. If you think you have one, please open an issue or a pull request.
|
||||||
|
|
||||||
(Optional) Postgres Data Volume Modifications
|
(Optional) Postgres Data Volume Modifications
|
||||||
---------------------------------------------
|
---------------------------------------------
|
||||||
|
@ -124,8 +150,8 @@ To check the logs out, run::
|
||||||
|
|
||||||
If you want to scale your application, run::
|
If you want to scale your application, run::
|
||||||
|
|
||||||
docker-compose -f production.yml scale django=4
|
docker-compose -f production.yml up --scale django=4
|
||||||
docker-compose -f production.yml scale celeryworker=2
|
docker-compose -f production.yml up --scale celeryworker=2
|
||||||
|
|
||||||
.. warning:: don't try to scale ``postgres``, ``celerybeat``, or ``traefik``.
|
.. warning:: don't try to scale ``postgres``, ``celerybeat``, or ``traefik``.
|
||||||
|
|
||||||
|
|
|
@ -3,9 +3,6 @@ Getting Up and Running Locally With Docker
|
||||||
|
|
||||||
.. index:: Docker
|
.. index:: Docker
|
||||||
|
|
||||||
The steps below will get you up and running with a local development environment.
|
|
||||||
All of these commands assume you are in the root of your generated project.
|
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
If you're new to Docker, please be aware that some resources are cached system-wide
|
If you're new to Docker, please be aware that some resources are cached system-wide
|
||||||
|
@ -18,10 +15,17 @@ Prerequisites
|
||||||
|
|
||||||
* Docker; if you don't have it yet, follow the `installation instructions`_;
|
* Docker; if you don't have it yet, follow the `installation instructions`_;
|
||||||
* Docker Compose; refer to the official documentation for the `installation guide`_.
|
* Docker Compose; refer to the official documentation for the `installation guide`_.
|
||||||
|
* Pre-commit; refer to the official `pre-commit`_ documentation.
|
||||||
|
* Cookiecutter; refer to the official GitHub repository of `Cookiecutter`_
|
||||||
|
|
||||||
.. _`installation instructions`: https://docs.docker.com/install/#supported-platforms
|
.. _`installation instructions`: https://docs.docker.com/install/#supported-platforms
|
||||||
.. _`installation guide`: https://docs.docker.com/compose/install/
|
.. _`installation guide`: https://docs.docker.com/compose/install/
|
||||||
|
.. _`pre-commit`: https://pre-commit.com/#install
|
||||||
|
.. _`Cookiecutter`: https://github.com/cookiecutter/cookiecutter
|
||||||
|
|
||||||
|
Before Getting Started
|
||||||
|
----------------------
|
||||||
|
.. include:: generate-project-block.rst
|
||||||
|
|
||||||
Build the Stack
|
Build the Stack
|
||||||
---------------
|
---------------
|
||||||
|
@ -32,6 +36,13 @@ This can take a while, especially the first time you run this particular command
|
||||||
|
|
||||||
Generally, if you want to emulate production environment use ``production.yml`` instead. And this is true for any other actions you might need to perform: whenever a switch is required, just do it!
|
Generally, if you want to emulate production environment use ``production.yml`` instead. And this is true for any other actions you might need to perform: whenever a switch is required, just do it!
|
||||||
|
|
||||||
|
Before doing any git commit, `pre-commit`_ should be installed globally on your local machine, and then::
|
||||||
|
|
||||||
|
$ git init
|
||||||
|
$ pre-commit install
|
||||||
|
|
||||||
|
Failing to do so will result with a bunch of CI and Linter errors that can be avoided with pre-commit.
|
||||||
|
|
||||||
|
|
||||||
Run the Stack
|
Run the Stack
|
||||||
-------------
|
-------------
|
||||||
|
@ -116,7 +127,7 @@ Consider the aforementioned ``.envs/.local/.postgres``: ::
|
||||||
|
|
||||||
The three envs we are presented with here are ``POSTGRES_DB``, ``POSTGRES_USER``, and ``POSTGRES_PASSWORD`` (by the way, their values have also been generated for you). You might have figured out already where these definitions will end up; it's all the same with ``django`` service container envs.
|
The three envs we are presented with here are ``POSTGRES_DB``, ``POSTGRES_USER``, and ``POSTGRES_PASSWORD`` (by the way, their values have also been generated for you). You might have figured out already where these definitions will end up; it's all the same with ``django`` service container envs.
|
||||||
|
|
||||||
One final touch: should you ever need to merge ``.envs/production/*`` in a single ``.env`` run the ``merge_production_dotenvs_in_dotenv.py``: ::
|
One final touch: should you ever need to merge ``.envs/.production/*`` in a single ``.env`` run the ``merge_production_dotenvs_in_dotenv.py``: ::
|
||||||
|
|
||||||
$ python merge_production_dotenvs_in_dotenv.py
|
$ python merge_production_dotenvs_in_dotenv.py
|
||||||
|
|
||||||
|
@ -159,16 +170,18 @@ docker
|
||||||
|
|
||||||
The ``container_name`` from the yml file can be used to check on containers with docker commands, for example: ::
|
The ``container_name`` from the yml file can be used to check on containers with docker commands, for example: ::
|
||||||
|
|
||||||
$ docker logs worker
|
$ docker logs <project_slug>_local_celeryworker
|
||||||
$ docker top worker
|
$ docker top <project_slug>_local_celeryworker
|
||||||
|
|
||||||
|
|
||||||
|
Notice that the ``container_name`` is generated dynamically using your project slug as a prefix
|
||||||
|
|
||||||
Mailhog
|
Mailhog
|
||||||
~~~~~~~
|
~~~~~~~
|
||||||
|
|
||||||
When developing locally you can go with MailHog_ for email testing provided ``use_mailhog`` was set to ``y`` on setup. To proceed,
|
When developing locally you can go with MailHog_ for email testing provided ``use_mailhog`` was set to ``y`` on setup. To proceed,
|
||||||
|
|
||||||
#. make sure ``mailhog`` container is up and running;
|
#. make sure ``<project_slug>_local_mailhog`` container is up and running;
|
||||||
|
|
||||||
#. open up ``http://127.0.0.1:8025``.
|
#. open up ``http://127.0.0.1:8025``.
|
||||||
|
|
||||||
|
@ -178,10 +191,9 @@ When developing locally you can go with MailHog_ for email testing provided ``us
|
||||||
|
|
||||||
Celery tasks in local development
|
Celery tasks in local development
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
When not using docker Celery tasks are set to run in Eager mode, so that a full stack is not needed. When using docker the task
|
When not using Docker, Celery tasks are set to run in Eager mode, so that a full stack is not needed. When using Docker, the task scheduler will be used by default.
|
||||||
scheduler will be used by default.
|
|
||||||
|
|
||||||
If you need tasks to be executed on the main thread during development set CELERY_TASK_ALWAYS_EAGER = True in config/settings/local.py.
|
If you need tasks to be executed on the main thread during development set ``CELERY_TASK_ALWAYS_EAGER = True`` in ``config/settings/local.py``.
|
||||||
|
|
||||||
Possible uses could be for testing, or ease of profiling with DJDT.
|
Possible uses could be for testing, or ease of profiling with DJDT.
|
||||||
|
|
||||||
|
@ -200,3 +212,101 @@ Prerequisites:
|
||||||
By default, it's enabled both in local and production environments (``local.yml`` and ``production.yml`` Docker Compose configs, respectively) through a ``flower`` service. For added security, ``flower`` requires its clients to provide authentication credentials specified as the corresponding environments' ``.envs/.local/.django`` and ``.envs/.production/.django`` ``CELERY_FLOWER_USER`` and ``CELERY_FLOWER_PASSWORD`` environment variables. Check out ``localhost:5555`` and see for yourself.
|
By default, it's enabled both in local and production environments (``local.yml`` and ``production.yml`` Docker Compose configs, respectively) through a ``flower`` service. For added security, ``flower`` requires its clients to provide authentication credentials specified as the corresponding environments' ``.envs/.local/.django`` and ``.envs/.production/.django`` ``CELERY_FLOWER_USER`` and ``CELERY_FLOWER_PASSWORD`` environment variables. Check out ``localhost:5555`` and see for yourself.
|
||||||
|
|
||||||
.. _`Flower`: https://github.com/mher/flower
|
.. _`Flower`: https://github.com/mher/flower
|
||||||
|
|
||||||
|
Using Webpack or Gulp
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
When using Webpack or Gulp as the ``frontend_pipeline`` option, you should access your application at the address of the ``node`` service in order to see your correct styles. This is http://localhost:3000 by default. When using any of the other ``frontend_pipeline`` options, you should use the address of the ``django`` service, http://localhost:8000.
|
||||||
|
|
||||||
|
Developing locally with HTTPS
|
||||||
|
-----------------------------
|
||||||
|
|
||||||
|
Increasingly it is becoming necessary to develop software in a secure environment in order that there are very few changes when deploying to production. Recently Facebook changed their policies for apps/sites that use Facebook login which requires the use of an HTTPS URL for the OAuth redirect URL. So if you want to use the ``users`` application with a OAuth provider such as Facebook, securing your communication to the local development environment will be necessary.
|
||||||
|
|
||||||
|
In order to create a secure environment, we need to have a trusted SSL certificate installed in our Docker application.
|
||||||
|
|
||||||
|
#. **Let's Encrypt**
|
||||||
|
|
||||||
|
The official line from Let’s Encrypt is:
|
||||||
|
|
||||||
|
[For local development section] ... The best option: Generate your own certificate, either self-signed or signed by a local root, and trust it in your operating system’s trust store. Then use that certificate in your local web server. See below for details.
|
||||||
|
|
||||||
|
See `letsencrypt.org - certificates-for-localhost`_
|
||||||
|
|
||||||
|
.. _`letsencrypt.org - certificates-for-localhost`: https://letsencrypt.org/docs/certificates-for-localhost/
|
||||||
|
|
||||||
|
#. **mkcert: Valid Https Certificates For Localhost**
|
||||||
|
|
||||||
|
`mkcert`_ is a simple by design tool that hides all the arcane knowledge required to generate valid TLS certificates. It works for any hostname or IP, including localhost. It supports macOS, Linux, and Windows, and Firefox, Chrome and Java. It even works on mobile devices with a couple manual steps.
|
||||||
|
|
||||||
|
See https://blog.filippo.io/mkcert-valid-https-certificates-for-localhost/
|
||||||
|
|
||||||
|
.. _`mkcert`: https://github.com/FiloSottile/mkcert/blob/master/README.md#supported-root-stores
|
||||||
|
|
||||||
|
After installing a trusted TLS certificate, configure your docker installation. We are going to configure an ``nginx`` reverse-proxy server. This makes sure that it does not interfere with our ``traefik`` configuration that is reserved for production environments.
|
||||||
|
|
||||||
|
These are the places that you should configure to secure your local environment.
|
||||||
|
|
||||||
|
certs
|
||||||
|
~~~~~
|
||||||
|
|
||||||
|
Take the certificates that you generated and place them in a folder called ``certs`` in the project's root folder. Assuming that you registered your local hostname as ``my-dev-env.local``, the certificates you will put in the folder should have the names ``my-dev-env.local.crt`` and ``my-dev-env.local.key``.
|
||||||
|
|
||||||
|
local.yml
|
||||||
|
~~~~~~~~~
|
||||||
|
|
||||||
|
#. Add the ``nginx-proxy`` service. ::
|
||||||
|
|
||||||
|
...
|
||||||
|
|
||||||
|
nginx-proxy:
|
||||||
|
image: jwilder/nginx-proxy:alpine
|
||||||
|
container_name: nginx-proxy
|
||||||
|
ports:
|
||||||
|
- "80:80"
|
||||||
|
- "443:443"
|
||||||
|
volumes:
|
||||||
|
- /var/run/docker.sock:/tmp/docker.sock:ro
|
||||||
|
- ./certs:/etc/nginx/certs
|
||||||
|
restart: always
|
||||||
|
depends_on:
|
||||||
|
- django
|
||||||
|
|
||||||
|
...
|
||||||
|
|
||||||
|
#. Link the ``nginx-proxy`` to ``django`` through environment variables.
|
||||||
|
|
||||||
|
``django`` already has an ``.env`` file connected to it. Add the following variables. You should do this especially if you are working with a team and you want to keep your local environment details to yourself.
|
||||||
|
|
||||||
|
::
|
||||||
|
|
||||||
|
# HTTPS
|
||||||
|
# ------------------------------------------------------------------------------
|
||||||
|
VIRTUAL_HOST=my-dev-env.local
|
||||||
|
VIRTUAL_PORT=8000
|
||||||
|
|
||||||
|
The services run behind the reverse proxy.
|
||||||
|
|
||||||
|
config/settings/local.py
|
||||||
|
~~~~~~~~~~~~~~~~~~~~~~~~
|
||||||
|
|
||||||
|
You should allow the new hostname. ::
|
||||||
|
|
||||||
|
ALLOWED_HOSTS = ["localhost", "0.0.0.0", "127.0.0.1", "my-dev-env.local"]
|
||||||
|
|
||||||
|
Rebuild your ``docker`` application. ::
|
||||||
|
|
||||||
|
$ docker-compose -f local.yml up -d --build
|
||||||
|
|
||||||
|
Go to your browser and type in your URL bar ``https://my-dev-env.local``
|
||||||
|
|
||||||
|
See `https with nginx`_ for more information on this configuration.
|
||||||
|
|
||||||
|
.. _`https with nginx`: https://codewithhugo.com/docker-compose-local-https/
|
||||||
|
|
||||||
|
.gitignore
|
||||||
|
~~~~~~~~~~
|
||||||
|
|
||||||
|
Add ``certs/*`` to the ``.gitignore`` file. This allows the folder to be included in the repo but its contents to be ignored.
|
||||||
|
|
||||||
|
*This configuration is for local development environments only. Do not use this for production since you might expose your local* ``rootCA-key.pem``.
|
||||||
|
|
|
@ -9,7 +9,7 @@ Setting Up Development Environment
|
||||||
|
|
||||||
Make sure to have the following on your host:
|
Make sure to have the following on your host:
|
||||||
|
|
||||||
* Python 3.8
|
* Python 3.10
|
||||||
* PostgreSQL_.
|
* PostgreSQL_.
|
||||||
* Redis_, if using Celery
|
* Redis_, if using Celery
|
||||||
* Cookiecutter_
|
* Cookiecutter_
|
||||||
|
@ -18,30 +18,32 @@ First things first.
|
||||||
|
|
||||||
#. Create a virtualenv: ::
|
#. Create a virtualenv: ::
|
||||||
|
|
||||||
$ python3.8 -m venv <virtual env path>
|
$ python3.10 -m venv <virtual env path>
|
||||||
|
|
||||||
#. Activate the virtualenv you have just created: ::
|
#. Activate the virtualenv you have just created: ::
|
||||||
|
|
||||||
$ source <virtual env path>/bin/activate
|
$ source <virtual env path>/bin/activate
|
||||||
|
|
||||||
#. Install cookiecutter-django
|
#.
|
||||||
|
.. include:: generate-project-block.rst
|
||||||
$ cookiecutter gh:pydanny/cookiecutter-django ::
|
|
||||||
|
|
||||||
#. Install development requirements: ::
|
#. Install development requirements: ::
|
||||||
|
|
||||||
|
$ cd <what you have entered as the project_slug at setup stage>
|
||||||
$ pip install -r requirements/local.txt
|
$ pip install -r requirements/local.txt
|
||||||
$ git init # A git repo is required for pre-commit to install
|
$ git init # A git repo is required for pre-commit to install
|
||||||
$ pre-commit install
|
$ pre-commit install
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
the `pre-commit` exists in the generated project as default.
|
the `pre-commit` hook exists in the generated project as default.
|
||||||
for the details of `pre-commit`, follow the [site of pre-commit](https://pre-commit.com/).
|
For the details of `pre-commit`, follow the `pre-commit`_ site.
|
||||||
|
|
||||||
#. Create a new PostgreSQL database using createdb_: ::
|
#. Create a new PostgreSQL database using createdb_: ::
|
||||||
|
|
||||||
$ createdb <what you have entered as the project_slug at setup stage> -U postgres --password <password>
|
$ createdb --username=postgres <project_slug>
|
||||||
|
|
||||||
|
``project_slug`` is what you have entered as the project_slug at the setup stage.
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
|
||||||
|
@ -80,14 +82,15 @@ First things first.
|
||||||
|
|
||||||
or if you're running asynchronously: ::
|
or if you're running asynchronously: ::
|
||||||
|
|
||||||
$ gunicorn config.asgi --bind 0.0.0.0:8000 -k uvicorn.workers.UvicornWorker --reload
|
$ uvicorn config.asgi:application --host 0.0.0.0 --reload --reload-include '*.html'
|
||||||
|
|
||||||
.. _PostgreSQL: https://www.postgresql.org/download/
|
.. _PostgreSQL: https://www.postgresql.org/download/
|
||||||
.. _Redis: https://redis.io/download
|
.. _Redis: https://redis.io/download
|
||||||
.. _CookieCutter: https://github.com/cookiecutter/cookiecutter
|
.. _CookieCutter: https://github.com/cookiecutter/cookiecutter
|
||||||
.. _createdb: https://www.postgresql.org/docs/current/static/app-createdb.html
|
.. _createdb: https://www.postgresql.org/docs/current/static/app-createdb.html
|
||||||
.. _initial PostgreSQL set up: http://suite.opengeo.org/docs/latest/dataadmin/pgGettingStarted/firstconnect.html
|
.. _initial PostgreSQL set up: https://web.archive.org/web/20190303010033/http://suite.opengeo.org/docs/latest/dataadmin/pgGettingStarted/firstconnect.html
|
||||||
.. _postgres documentation: https://www.postgresql.org/docs/current/static/auth-pg-hba-conf.html
|
.. _postgres documentation: https://www.postgresql.org/docs/current/static/auth-pg-hba-conf.html
|
||||||
|
.. _pre-commit: https://pre-commit.com/
|
||||||
.. _direnv: https://direnv.net/
|
.. _direnv: https://direnv.net/
|
||||||
|
|
||||||
|
|
||||||
|
@ -138,18 +141,55 @@ In production, we have Mailgun_ configured to have your back!
|
||||||
Celery
|
Celery
|
||||||
------
|
------
|
||||||
|
|
||||||
If the project is configured to use Celery as a task scheduler then by default tasks are set to run on the main thread
|
If the project is configured to use Celery as a task scheduler then, by default, tasks are set to run on the main thread when developing locally instead of getting sent to a broker. However, if you have Redis set up on your local machine, you can set the following in ``config/settings/local.py``::
|
||||||
when developing locally. If you have the appropriate setup on your local machine then set the following
|
|
||||||
in ``config/settings/local.py``::
|
|
||||||
|
|
||||||
CELERY_TASK_ALWAYS_EAGER = False
|
CELERY_TASK_ALWAYS_EAGER = False
|
||||||
|
|
||||||
|
Next, make sure `redis-server` is installed (per the `Getting started with Redis`_ guide) and run the server in one terminal::
|
||||||
|
|
||||||
|
$ redis-server
|
||||||
|
|
||||||
|
Start the Celery worker by running the following command in another terminal::
|
||||||
|
|
||||||
|
$ celery -A config.celery_app worker --loglevel=info
|
||||||
|
|
||||||
|
That Celery worker should be running whenever your app is running, typically as a background process,
|
||||||
|
so that it can pick up any tasks that get queued. Learn more from the `Celery Workers Guide`_.
|
||||||
|
|
||||||
|
The project comes with a simple task for manual testing purposes, inside `<project_slug>/users/tasks.py`. To queue that task locally, start the Django shell, import the task, and call `delay()` on it::
|
||||||
|
|
||||||
|
$ python manage.py shell
|
||||||
|
>> from <project_slug>.users.tasks import get_users_count
|
||||||
|
>> get_users_count.delay()
|
||||||
|
|
||||||
|
You can also use Django admin to queue up tasks, thanks to the `django-celerybeat`_ package.
|
||||||
|
|
||||||
|
.. _Getting started with Redis: https://redis.io/docs/getting-started/
|
||||||
|
.. _Celery Workers Guide: https://docs.celeryq.dev/en/stable/userguide/workers.html
|
||||||
|
.. _django-celerybeat: https://django-celery-beat.readthedocs.io/en/latest/
|
||||||
|
|
||||||
|
|
||||||
Sass Compilation & Live Reloading
|
Sass Compilation & Live Reloading
|
||||||
---------------------------------
|
---------------------------------
|
||||||
|
|
||||||
If you’d like to take advantage of live reloading and Sass compilation you can do so with a little
|
If you've opted for Gulp or Webpack as front-end pipeline, the project comes configured with `Sass`_ compilation and `live reloading`_. As you change your Sass/JS source files, the task runner will automatically rebuild the corresponding CSS and JS assets and reload them in your browser without refreshing the page.
|
||||||
bit of preparation, see :ref:`sass-compilation-live-reload`.
|
|
||||||
|
#. Make sure that `Node.js`_ v16 is installed on your machine.
|
||||||
|
#. In the project root, install the JS dependencies with::
|
||||||
|
|
||||||
|
$ npm install
|
||||||
|
|
||||||
|
#. Now - with your virtualenv activated - start the application by running::
|
||||||
|
|
||||||
|
$ npm run dev
|
||||||
|
|
||||||
|
The app will now run with live reloading enabled, applying front-end changes dynamically.
|
||||||
|
|
||||||
|
.. note:: The task will start 2 processes in parallel: the static assets build loop on one side, and the Django server on the other. You do NOT need to run Django as you would normally with ``manage.py runserver``.
|
||||||
|
|
||||||
|
.. _Node.js: http://nodejs.org/download/
|
||||||
|
.. _Sass: https://sass-lang.com/
|
||||||
|
.. _live reloading: https://browsersync.io
|
||||||
|
|
||||||
Summary
|
Summary
|
||||||
-------
|
-------
|
||||||
|
|
|
@ -55,8 +55,11 @@ With a single backup file copied to ``.`` that would be ::
|
||||||
|
|
||||||
$ docker cp 9c5c3f055843:/backups/backup_2018_03_13T09_05_07.sql.gz .
|
$ docker cp 9c5c3f055843:/backups/backup_2018_03_13T09_05_07.sql.gz .
|
||||||
|
|
||||||
.. _`command`: https://docs.docker.com/engine/reference/commandline/cp/
|
You can also get the container ID using ``docker-compose -f local.yml ps -q postgres`` so if you want to automate your backups, you don't have to check the container ID manually every time. Here is the full command ::
|
||||||
|
|
||||||
|
$ docker cp $(docker-compose -f local.yml ps -q postgres):/backups ./backups
|
||||||
|
|
||||||
|
.. _`command`: https://docs.docker.com/engine/reference/commandline/cp/
|
||||||
|
|
||||||
Restoring from the Existing Backup
|
Restoring from the Existing Backup
|
||||||
----------------------------------
|
----------------------------------
|
||||||
|
|
|
@ -4,42 +4,41 @@ Document
|
||||||
=========
|
=========
|
||||||
|
|
||||||
This project uses Sphinx_ documentation generator.
|
This project uses Sphinx_ documentation generator.
|
||||||
After you have set up to `develop locally`_, run the following commands to generate the HTML documentation: ::
|
|
||||||
|
|
||||||
$ sphinx-build docs/ docs/_build/html/
|
After you have set up to `develop locally`_, run the following command from the project directory to build and serve HTML documentation: ::
|
||||||
|
|
||||||
|
$ make -C docs livehtml
|
||||||
|
|
||||||
If you set up your project to `develop locally with docker`_, run the following command: ::
|
If you set up your project to `develop locally with docker`_, run the following command: ::
|
||||||
|
|
||||||
$ docker-compose -f local.yml run --rm django sphinx-build docs/ docs/_build/html/
|
$ docker-compose -f local.yml up docs
|
||||||
|
|
||||||
|
Navigate to port 9000 on your host to see the documentation. This will be opened automatically at `localhost`_ for local, non-docker development.
|
||||||
|
|
||||||
|
Note: using Docker for documentation sets up a temporary SQLite file by setting the environment variable ``DATABASE_URL=sqlite:///readthedocs.db`` in ``docs/conf.py`` to avoid a dependency on PostgreSQL.
|
||||||
|
|
||||||
Generate API documentation
|
Generate API documentation
|
||||||
----------------------------
|
----------------------------
|
||||||
|
|
||||||
Sphinx can automatically generate documentation from docstrings, to enable this feature, follow these steps:
|
Edit the ``docs`` files and project application docstrings to create your documentation.
|
||||||
|
|
||||||
1. Add Sphinx extension in ``docs/conf.py`` file, like below: ::
|
Sphinx can automatically include class and function signatures and docstrings in generated documentation.
|
||||||
|
See the generated project documentation for more examples.
|
||||||
|
|
||||||
extensions = [
|
Setting up ReadTheDocs
|
||||||
'sphinx.ext.autodoc',
|
----------------------
|
||||||
]
|
|
||||||
|
|
||||||
2. Uncomment the following lines in the ``docs/conf.py`` file: ::
|
To setup your documentation on `ReadTheDocs`_, you must
|
||||||
|
|
||||||
# import django
|
1. Go to `ReadTheDocs`_ and login/create an account
|
||||||
# sys.path.insert(0, os.path.abspath('..'))
|
2. Add your GitHub repository
|
||||||
# os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
|
3. Trigger a build
|
||||||
# django.setup()
|
|
||||||
|
|
||||||
3. Run the following command: ::
|
Additionally, you can auto-build Pull Request previews, but `you must enable it`_.
|
||||||
|
|
||||||
$ sphinx-apidoc -f -o ./docs/modules/ ./tpub/ migrations/*
|
|
||||||
|
|
||||||
If you set up your project to `develop locally with docker`_, run the following command: ::
|
|
||||||
|
|
||||||
$ docker-compose -f local.yml run --rm django sphinx-apidoc -f -o ./docs/modules ./tpub/ migrations/*
|
|
||||||
|
|
||||||
4. Regenerate HTML documentation as written above.
|
|
||||||
|
|
||||||
|
.. _localhost: http://localhost:9000/
|
||||||
.. _Sphinx: https://www.sphinx-doc.org/en/master/index.html
|
.. _Sphinx: https://www.sphinx-doc.org/en/master/index.html
|
||||||
.. _develop locally: ./developing-locally.html
|
.. _develop locally: ./developing-locally.html
|
||||||
.. _develop locally with docker: ./developing-locally-docker.html
|
.. _develop locally with docker: ./developing-locally-docker.html
|
||||||
|
.. _ReadTheDocs: https://readthedocs.org/
|
||||||
|
.. _you must enable it: https://docs.readthedocs.io/en/latest/guides/autobuild-docs-for-pull-requests.html#autobuild-documentation-for-pull-requests
|
||||||
|
|
|
@ -6,11 +6,11 @@ FAQ
|
||||||
Why is there a django.contrib.sites directory in Cookiecutter Django?
|
Why is there a django.contrib.sites directory in Cookiecutter Django?
|
||||||
---------------------------------------------------------------------
|
---------------------------------------------------------------------
|
||||||
|
|
||||||
It is there to add a migration so you don't have to manually change the ``sites.Site`` record from ``example.com`` to whatever your domain is. Instead, your ``{{cookiecutter.domain_name}}`` and {{cookiecutter.project_name}} value is placed by **Cookiecutter** in the domain and name fields respectively.
|
It is there to add a migration so you don't have to manually change the ``sites.Site`` record from ``example.com`` to whatever your domain is. Instead, your ``{{cookiecutter.domain_name}}`` and ``{{cookiecutter.project_name}}`` value is placed by **Cookiecutter** in the domain and name fields respectively.
|
||||||
|
|
||||||
See `0003_set_site_domain_and_name.py`_.
|
See `0003_set_site_domain_and_name.py`_.
|
||||||
|
|
||||||
.. _`0003_set_site_domain_and_name.py`: https://github.com/pydanny/cookiecutter-django/blob/master/%7B%7Bcookiecutter.project_slug%7D%7D/%7B%7Bcookiecutter.project_slug%7D%7D/contrib/sites/migrations/0003_set_site_domain_and_name.py
|
.. _`0003_set_site_domain_and_name.py`: https://github.com/cookiecutter/cookiecutter-django/blob/master/%7B%7Bcookiecutter.project_slug%7D%7D/%7B%7Bcookiecutter.project_slug%7D%7D/contrib/sites/migrations/0003_set_site_domain_and_name.py
|
||||||
|
|
||||||
|
|
||||||
Why aren't you using just one configuration file (12-Factor App)
|
Why aren't you using just one configuration file (12-Factor App)
|
||||||
|
|
7
docs/generate-project-block.rst
Normal file
7
docs/generate-project-block.rst
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
Generate a new cookiecutter-django project: ::
|
||||||
|
|
||||||
|
$ cookiecutter gh:cookiecutter/cookiecutter-django
|
||||||
|
|
||||||
|
For more information refer to
|
||||||
|
:ref:`Project Generation Options <template-options>`.
|
||||||
|
|
|
@ -1,13 +1,14 @@
|
||||||
.. cookiecutter-django documentation master file.
|
.. cookiecutter-django documentation master file.
|
||||||
|
|
||||||
Welcome to Cookiecutter Django's documentation!
|
Welcome to Cookiecutter Django's documentation!
|
||||||
====================================================================
|
===============================================
|
||||||
|
|
||||||
A Cookiecutter_ template for Django.
|
Powered by Cookiecutter_, Cookiecutter Django is a project template for jumpstarting production-ready Django projects. The template offers a number of generation options, we invite you to check the :ref:`dedicated page <template-options>` to learn more about each of them.
|
||||||
|
|
||||||
.. _cookiecutter: https://github.com/cookiecutter/cookiecutter
|
.. _cookiecutter: https://github.com/cookiecutter/cookiecutter
|
||||||
|
|
||||||
Contents:
|
Contents
|
||||||
|
--------
|
||||||
|
|
||||||
.. toctree::
|
.. toctree::
|
||||||
:maxdepth: 2
|
:maxdepth: 2
|
||||||
|
@ -23,11 +24,12 @@ Contents:
|
||||||
deployment-on-heroku
|
deployment-on-heroku
|
||||||
deployment-with-docker
|
deployment-with-docker
|
||||||
docker-postgres-backups
|
docker-postgres-backups
|
||||||
|
websocket
|
||||||
faq
|
faq
|
||||||
troubleshooting
|
troubleshooting
|
||||||
|
|
||||||
Indices and tables
|
Indices and tables
|
||||||
==================
|
------------------
|
||||||
|
|
||||||
* :ref:`genindex`
|
* :ref:`genindex`
|
||||||
* :ref:`search`
|
* :ref:`search`
|
||||||
|
|
|
@ -19,7 +19,7 @@ The config for flake8 is located in setup.cfg. It specifies:
|
||||||
pylint
|
pylint
|
||||||
------
|
------
|
||||||
|
|
||||||
This is included in flake8's checks, but you can also run it separately to see a more detailed report: ::
|
To run pylint: ::
|
||||||
|
|
||||||
$ pylint <python files that you wish to lint>
|
$ pylint <python files that you wish to lint>
|
||||||
|
|
||||||
|
|
|
@ -1,24 +0,0 @@
|
||||||
.. _sass-compilation-live-reload:
|
|
||||||
|
|
||||||
Sass Compilation & Live Reloading
|
|
||||||
=================================
|
|
||||||
|
|
||||||
If you'd like to take advantage of `live reload`_ and Sass compilation:
|
|
||||||
|
|
||||||
- Make sure that nodejs_ is installed. Then in the project root run::
|
|
||||||
|
|
||||||
$ npm install
|
|
||||||
|
|
||||||
.. _nodejs: http://nodejs.org/download/
|
|
||||||
|
|
||||||
- Now you just need::
|
|
||||||
|
|
||||||
$ npm run dev
|
|
||||||
|
|
||||||
The base app will now run as it would with the usual ``manage.py runserver`` but with live reloading and Sass compilation enabled.
|
|
||||||
When changing your Sass files, they will be automatically recompiled and change will be reflected in your browser without refreshing.
|
|
||||||
|
|
||||||
To get live reloading to work you'll probably need to install an `appropriate browser extension`_
|
|
||||||
|
|
||||||
.. _live reload: http://livereload.com/
|
|
||||||
.. _appropriate browser extension: http://livereload.com/extensions/
|
|
|
@ -1,7 +1,7 @@
|
||||||
If your email server used to send email isn't configured properly (Mailgun by default),
|
If your email server used to send email isn't configured properly (Mailgun by default),
|
||||||
attempting to send an email will cause an Internal Server Error.
|
attempting to send an email will cause an Internal Server Error.
|
||||||
|
|
||||||
By default, django-allauth is setup to `have emails verifications mandatory`_,
|
By default, ``django-allauth`` is setup to `have emails verifications mandatory`_,
|
||||||
which means it'll send a verification email when an unverified user tries to
|
which means it'll send a verification email when an unverified user tries to
|
||||||
log-in or when someone tries to sign-up.
|
log-in or when someone tries to sign-up.
|
||||||
|
|
||||||
|
|
|
@ -1,6 +1,12 @@
|
||||||
|
.. _template-options:
|
||||||
|
|
||||||
Project Generation Options
|
Project Generation Options
|
||||||
==========================
|
==========================
|
||||||
|
|
||||||
|
This page describes all the template options that will be prompted by the `cookiecutter CLI`_ prior to generating your project.
|
||||||
|
|
||||||
|
.. _cookiecutter CLI: https://github.com/cookiecutter/cookiecutter
|
||||||
|
|
||||||
project_name:
|
project_name:
|
||||||
Your project's human-readable name, capitals and spaces allowed.
|
Your project's human-readable name, capitals and spaces allowed.
|
||||||
|
|
||||||
|
@ -49,24 +55,19 @@ use_docker:
|
||||||
postgresql_version:
|
postgresql_version:
|
||||||
Select a PostgreSQL_ version to use. The choices are:
|
Select a PostgreSQL_ version to use. The choices are:
|
||||||
|
|
||||||
1. 11.3
|
1. 14
|
||||||
2. 10.8
|
2. 13
|
||||||
3. 9.6
|
3. 12
|
||||||
4. 9.5
|
4. 11
|
||||||
5. 9.4
|
5. 10
|
||||||
|
|
||||||
js_task_runner:
|
|
||||||
Select a JavaScript task runner. The choices are:
|
|
||||||
|
|
||||||
1. None
|
|
||||||
2. Gulp_
|
|
||||||
|
|
||||||
cloud_provider:
|
cloud_provider:
|
||||||
Select a cloud provider for static & media files. The choices are:
|
Select a cloud provider for static & media files. The choices are:
|
||||||
|
|
||||||
1. AWS_
|
1. AWS_
|
||||||
2. GCP_
|
2. GCP_
|
||||||
3. None
|
3. Azure_
|
||||||
|
4. None
|
||||||
|
|
||||||
Note that if you choose no cloud provider, media files won't work.
|
Note that if you choose no cloud provider, media files won't work.
|
||||||
|
|
||||||
|
@ -89,13 +90,15 @@ use_async:
|
||||||
use_drf:
|
use_drf:
|
||||||
Indicates whether the project should be configured to use `Django Rest Framework`_.
|
Indicates whether the project should be configured to use `Django Rest Framework`_.
|
||||||
|
|
||||||
custom_bootstrap_compilation:
|
frontend_pipeline:
|
||||||
Indicates whether the project should support Bootstrap recompilation
|
Select a pipeline to compile and optimise frontend assets (JS, CSS, ...):
|
||||||
via the selected JavaScript task runner's task. This can be useful
|
|
||||||
for real-time Bootstrap variable alteration.
|
|
||||||
|
|
||||||
use_compressor:
|
1. None
|
||||||
Indicates whether the project should be configured to use `Django Compressor`_.
|
2. `Django Compressor`_
|
||||||
|
3. `Gulp`_
|
||||||
|
4. `Webpack`_
|
||||||
|
|
||||||
|
Both Gulp and Webpack support Bootstrap recompilation with real-time variables alteration.
|
||||||
|
|
||||||
use_celery:
|
use_celery:
|
||||||
Indicates whether the project should be configured to use Celery_.
|
Indicates whether the project should be configured to use Celery_.
|
||||||
|
@ -119,6 +122,7 @@ ci_tool:
|
||||||
1. None
|
1. None
|
||||||
2. `Travis CI`_
|
2. `Travis CI`_
|
||||||
3. `Gitlab CI`_
|
3. `Gitlab CI`_
|
||||||
|
4. `Github Actions`_
|
||||||
|
|
||||||
keep_local_envs_in_vcs:
|
keep_local_envs_in_vcs:
|
||||||
Indicates whether the project's ``.envs/.local/`` should be kept in VCS
|
Indicates whether the project's ``.envs/.local/`` should be kept in VCS
|
||||||
|
@ -144,9 +148,11 @@ debug:
|
||||||
.. _PostgreSQL: https://www.postgresql.org/docs/
|
.. _PostgreSQL: https://www.postgresql.org/docs/
|
||||||
|
|
||||||
.. _Gulp: https://github.com/gulpjs/gulp
|
.. _Gulp: https://github.com/gulpjs/gulp
|
||||||
|
.. _Webpack: https://webpack.js.org
|
||||||
|
|
||||||
.. _AWS: https://aws.amazon.com/s3/
|
.. _AWS: https://aws.amazon.com/s3/
|
||||||
.. _GCP: https://cloud.google.com/storage/
|
.. _GCP: https://cloud.google.com/storage/
|
||||||
|
.. _Azure: https://azure.microsoft.com/en-us/products/storage/blobs/
|
||||||
|
|
||||||
.. _Amazon SES: https://aws.amazon.com/ses/
|
.. _Amazon SES: https://aws.amazon.com/ses/
|
||||||
.. _Mailgun: https://www.mailgun.com
|
.. _Mailgun: https://www.mailgun.com
|
||||||
|
@ -176,3 +182,4 @@ debug:
|
||||||
|
|
||||||
.. _GitLab CI: https://docs.gitlab.com/ee/ci/
|
.. _GitLab CI: https://docs.gitlab.com/ee/ci/
|
||||||
|
|
||||||
|
.. _Github Actions: https://docs.github.com/en/actions
|
||||||
|
|
2
docs/requirements.txt
Normal file
2
docs/requirements.txt
Normal file
|
@ -0,0 +1,2 @@
|
||||||
|
sphinx==5.3.0
|
||||||
|
sphinx-rtd-theme==1.1.1
|
|
@ -45,9 +45,16 @@ DJANGO_AWS_ACCESS_KEY_ID AWS_ACCESS_KEY_ID n/a
|
||||||
DJANGO_AWS_SECRET_ACCESS_KEY AWS_SECRET_ACCESS_KEY n/a raises error
|
DJANGO_AWS_SECRET_ACCESS_KEY AWS_SECRET_ACCESS_KEY n/a raises error
|
||||||
DJANGO_AWS_STORAGE_BUCKET_NAME AWS_STORAGE_BUCKET_NAME n/a raises error
|
DJANGO_AWS_STORAGE_BUCKET_NAME AWS_STORAGE_BUCKET_NAME n/a raises error
|
||||||
DJANGO_AWS_S3_REGION_NAME AWS_S3_REGION_NAME n/a None
|
DJANGO_AWS_S3_REGION_NAME AWS_S3_REGION_NAME n/a None
|
||||||
|
DJANGO_AWS_S3_CUSTOM_DOMAIN AWS_S3_CUSTOM_DOMAIN n/a None
|
||||||
|
DJANGO_AWS_S3_MAX_MEMORY_SIZE AWS_S3_MAX_MEMORY_SIZE n/a 100_000_000
|
||||||
DJANGO_GCP_STORAGE_BUCKET_NAME GS_BUCKET_NAME n/a raises error
|
DJANGO_GCP_STORAGE_BUCKET_NAME GS_BUCKET_NAME n/a raises error
|
||||||
GOOGLE_APPLICATION_CREDENTIALS n/a n/a raises error
|
GOOGLE_APPLICATION_CREDENTIALS n/a n/a raises error
|
||||||
|
DJANGO_AZURE_ACCOUNT_KEY AZURE_ACCOUNT_KEY n/a raises error
|
||||||
|
DJANGO_AZURE_ACCOUNT_NAME AZURE_ACCOUNT_NAME n/a raises error
|
||||||
|
DJANGO_AZURE_CONTAINER_NAME AZURE_CONTAINER n/a raises error
|
||||||
SENTRY_DSN SENTRY_DSN n/a raises error
|
SENTRY_DSN SENTRY_DSN n/a raises error
|
||||||
|
SENTRY_ENVIRONMENT n/a n/a production
|
||||||
|
SENTRY_TRACES_SAMPLE_RATE n/a n/a 0.0
|
||||||
DJANGO_SENTRY_LOG_LEVEL SENTRY_LOG_LEVEL n/a logging.INFO
|
DJANGO_SENTRY_LOG_LEVEL SENTRY_LOG_LEVEL n/a logging.INFO
|
||||||
MAILGUN_API_KEY MAILGUN_API_KEY n/a raises error
|
MAILGUN_API_KEY MAILGUN_API_KEY n/a raises error
|
||||||
MAILGUN_DOMAIN MAILGUN_SENDER_DOMAIN n/a raises error
|
MAILGUN_DOMAIN MAILGUN_SENDER_DOMAIN n/a raises error
|
||||||
|
|
|
@ -28,10 +28,15 @@ Coverage
|
||||||
|
|
||||||
You should build your tests to provide the highest level of **code coverage**. You can run the ``pytest`` with code ``coverage`` by typing in the following command: ::
|
You should build your tests to provide the highest level of **code coverage**. You can run the ``pytest`` with code ``coverage`` by typing in the following command: ::
|
||||||
|
|
||||||
$ docker-compose -f local.yml run --rm django coverage run -m pytest
|
$ coverage run -m pytest
|
||||||
|
|
||||||
Once the tests are complete, in order to see the code coverage, run the following command: ::
|
Once the tests are complete, in order to see the code coverage, run the following command: ::
|
||||||
|
|
||||||
|
$ coverage report
|
||||||
|
|
||||||
|
If you're running the project locally with Docker, use these commands instead: ::
|
||||||
|
|
||||||
|
$ docker-compose -f local.yml run --rm django coverage run -m pytest
|
||||||
$ docker-compose -f local.yml run --rm django coverage report
|
$ docker-compose -f local.yml run --rm django coverage report
|
||||||
|
|
||||||
.. note::
|
.. note::
|
||||||
|
@ -53,4 +58,4 @@ Once the tests are complete, in order to see the code coverage, run the followin
|
||||||
.. _develop locally with docker: ./developing-locally-docker.html
|
.. _develop locally with docker: ./developing-locally-docker.html
|
||||||
.. _customize: https://docs.pytest.org/en/latest/customize.html
|
.. _customize: https://docs.pytest.org/en/latest/customize.html
|
||||||
.. _unittest: https://docs.python.org/3/library/unittest.html#module-unittest
|
.. _unittest: https://docs.python.org/3/library/unittest.html#module-unittest
|
||||||
.. _configuring: https://coverage.readthedocs.io/en/v4.5.x/config.html
|
.. _configuring: https://coverage.readthedocs.io/en/latest/config.html
|
||||||
|
|
|
@ -1,5 +1,5 @@
|
||||||
Troubleshooting
|
Troubleshooting
|
||||||
=====================================
|
===============
|
||||||
|
|
||||||
This page contains some advice about errors and problems commonly encountered during the development of Cookiecutter Django applications.
|
This page contains some advice about errors and problems commonly encountered during the development of Cookiecutter Django applications.
|
||||||
|
|
||||||
|
@ -38,6 +38,16 @@ To fix this, you can either:
|
||||||
.. _rm: https://docs.docker.com/engine/reference/commandline/volume_rm/
|
.. _rm: https://docs.docker.com/engine/reference/commandline/volume_rm/
|
||||||
.. _prune: https://docs.docker.com/v17.09/engine/reference/commandline/system_prune/
|
.. _prune: https://docs.docker.com/v17.09/engine/reference/commandline/system_prune/
|
||||||
|
|
||||||
|
Variable is not set. Defaulting to a blank string
|
||||||
|
-------------------------------------------------
|
||||||
|
|
||||||
|
Example::
|
||||||
|
|
||||||
|
WARN[0000] The "DJANGO_AWS_STORAGE_BUCKET_NAME" variable is not set. Defaulting to a blank string.
|
||||||
|
WARN[0000] The "DJANGO_AWS_S3_CUSTOM_DOMAIN" variable is not set. Defaulting to a blank string.
|
||||||
|
|
||||||
|
You have probably opted for Docker + Webpack without Whitenoise. This is a know limitation of the combination, which needs a little bit of manual intervention. See the :ref:`dedicated section about it <webpack-whitenoise-limitation>`.
|
||||||
|
|
||||||
Others
|
Others
|
||||||
------
|
------
|
||||||
|
|
||||||
|
@ -47,5 +57,5 @@ Others
|
||||||
|
|
||||||
#. New apps not getting created in project root: This is the expected behavior, because cookiecutter-django does not change the way that django startapp works, you'll have to fix this manually (see `#1725`_)
|
#. New apps not getting created in project root: This is the expected behavior, because cookiecutter-django does not change the way that django startapp works, you'll have to fix this manually (see `#1725`_)
|
||||||
|
|
||||||
.. _#528: https://github.com/pydanny/cookiecutter-django/issues/528#issuecomment-212650373
|
.. _#528: https://github.com/cookiecutter/cookiecutter-django/issues/528#issuecomment-212650373
|
||||||
.. _#1725: https://github.com/pydanny/cookiecutter-django/issues/1725#issuecomment-407493176
|
.. _#1725: https://github.com/cookiecutter/cookiecutter-django/issues/1725#issuecomment-407493176
|
||||||
|
|
25
docs/websocket.rst
Normal file
25
docs/websocket.rst
Normal file
|
@ -0,0 +1,25 @@
|
||||||
|
.. _websocket:
|
||||||
|
|
||||||
|
=========
|
||||||
|
Websocket
|
||||||
|
=========
|
||||||
|
|
||||||
|
You can enable web sockets if you select ``use_async`` option when creating a project. That indicates whether the project can use web sockets with Uvicorn + Gunicorn.
|
||||||
|
|
||||||
|
Usage
|
||||||
|
-----
|
||||||
|
|
||||||
|
JavaScript example: ::
|
||||||
|
|
||||||
|
> ws = new WebSocket('ws://localhost:8000/') // or 'wss://<mydomain.com>/' in prod
|
||||||
|
WebSocket {url: "ws://localhost:8000/", readyState: 0, bufferedAmount: 0, onopen: null, onerror: null, …}
|
||||||
|
> ws.onmessage = event => console.log(event.data)
|
||||||
|
event => console.log(event.data)
|
||||||
|
> ws.send("ping")
|
||||||
|
undefined
|
||||||
|
pong!
|
||||||
|
|
||||||
|
|
||||||
|
If you don't use Traefik, you might have to configure your reverse proxy accordingly (example with Nginx_).
|
||||||
|
|
||||||
|
.. _Nginx: https://www.nginx.com/blog/websocket-nginx/
|
|
@ -5,10 +5,12 @@ NOTE:
|
||||||
can potentially be run in Python 2.x environment
|
can potentially be run in Python 2.x environment
|
||||||
(at least so we presume in `pre_gen_project.py`).
|
(at least so we presume in `pre_gen_project.py`).
|
||||||
|
|
||||||
TODO: ? restrict Cookiecutter Django project initialization to Python 3.x environments only
|
TODO: restrict Cookiecutter Django project initialization to
|
||||||
|
Python 3.x environments only
|
||||||
"""
|
"""
|
||||||
from __future__ import print_function
|
from __future__ import print_function
|
||||||
|
|
||||||
|
import json
|
||||||
import os
|
import os
|
||||||
import random
|
import random
|
||||||
import shutil
|
import shutil
|
||||||
|
@ -59,6 +61,10 @@ def remove_docker_files():
|
||||||
file_names = ["local.yml", "production.yml", ".dockerignore"]
|
file_names = ["local.yml", "production.yml", ".dockerignore"]
|
||||||
for file_name in file_names:
|
for file_name in file_names:
|
||||||
os.remove(file_name)
|
os.remove(file_name)
|
||||||
|
if "{{ cookiecutter.use_pycharm }}".lower() == "y":
|
||||||
|
file_names = ["docker_compose_up_django.xml", "docker_compose_up_docs.xml"]
|
||||||
|
for file_name in file_names:
|
||||||
|
os.remove(os.path.join(".idea", "runConfigurations", file_name))
|
||||||
|
|
||||||
|
|
||||||
def remove_utility_files():
|
def remove_utility_files():
|
||||||
|
@ -75,6 +81,15 @@ def remove_heroku_files():
|
||||||
# don't remove the file if we are using travisci but not using heroku
|
# don't remove the file if we are using travisci but not using heroku
|
||||||
continue
|
continue
|
||||||
os.remove(file_name)
|
os.remove(file_name)
|
||||||
|
remove_heroku_build_hooks()
|
||||||
|
|
||||||
|
|
||||||
|
def remove_heroku_build_hooks():
|
||||||
|
shutil.rmtree("bin")
|
||||||
|
|
||||||
|
|
||||||
|
def remove_sass_files():
|
||||||
|
shutil.rmtree(os.path.join("{{cookiecutter.project_slug}}", "static", "sass"))
|
||||||
|
|
||||||
|
|
||||||
def remove_gulp_files():
|
def remove_gulp_files():
|
||||||
|
@ -83,12 +98,105 @@ def remove_gulp_files():
|
||||||
os.remove(file_name)
|
os.remove(file_name)
|
||||||
|
|
||||||
|
|
||||||
|
def remove_webpack_files():
|
||||||
|
shutil.rmtree("webpack")
|
||||||
|
remove_vendors_js()
|
||||||
|
|
||||||
|
|
||||||
|
def remove_vendors_js():
|
||||||
|
vendors_js_path = os.path.join(
|
||||||
|
"{{ cookiecutter.project_slug }}",
|
||||||
|
"static",
|
||||||
|
"js",
|
||||||
|
"vendors.js",
|
||||||
|
)
|
||||||
|
if os.path.exists(vendors_js_path):
|
||||||
|
os.remove(vendors_js_path)
|
||||||
|
|
||||||
|
|
||||||
def remove_packagejson_file():
|
def remove_packagejson_file():
|
||||||
file_names = ["package.json"]
|
file_names = ["package.json"]
|
||||||
for file_name in file_names:
|
for file_name in file_names:
|
||||||
os.remove(file_name)
|
os.remove(file_name)
|
||||||
|
|
||||||
|
|
||||||
|
def update_package_json(remove_dev_deps=None, remove_keys=None, scripts=None):
|
||||||
|
remove_dev_deps = remove_dev_deps or []
|
||||||
|
remove_keys = remove_keys or []
|
||||||
|
scripts = scripts or {}
|
||||||
|
with open("package.json", mode="r") as fd:
|
||||||
|
content = json.load(fd)
|
||||||
|
for package_name in remove_dev_deps:
|
||||||
|
content["devDependencies"].pop(package_name)
|
||||||
|
for key in remove_keys:
|
||||||
|
content.pop(key)
|
||||||
|
content["scripts"].update(scripts)
|
||||||
|
with open("package.json", mode="w") as fd:
|
||||||
|
json.dump(content, fd, ensure_ascii=False, indent=2)
|
||||||
|
fd.write("\n")
|
||||||
|
|
||||||
|
|
||||||
|
def handle_js_runner(choice, use_docker, use_async):
|
||||||
|
if choice == "Gulp":
|
||||||
|
update_package_json(
|
||||||
|
remove_dev_deps=[
|
||||||
|
"@babel/core",
|
||||||
|
"@babel/preset-env",
|
||||||
|
"babel-loader",
|
||||||
|
"concurrently",
|
||||||
|
"css-loader",
|
||||||
|
"mini-css-extract-plugin",
|
||||||
|
"postcss-loader",
|
||||||
|
"postcss-preset-env",
|
||||||
|
"sass-loader",
|
||||||
|
"webpack",
|
||||||
|
"webpack-bundle-tracker",
|
||||||
|
"webpack-cli",
|
||||||
|
"webpack-dev-server",
|
||||||
|
"webpack-merge",
|
||||||
|
],
|
||||||
|
remove_keys=["babel"],
|
||||||
|
scripts={
|
||||||
|
"dev": "gulp",
|
||||||
|
"build": "gulp generate-assets",
|
||||||
|
},
|
||||||
|
)
|
||||||
|
remove_webpack_files()
|
||||||
|
elif choice == "Webpack":
|
||||||
|
scripts = {
|
||||||
|
"dev": "webpack serve --config webpack/dev.config.js",
|
||||||
|
"build": "webpack --config webpack/prod.config.js",
|
||||||
|
}
|
||||||
|
remove_dev_deps = [
|
||||||
|
"browser-sync",
|
||||||
|
"cssnano",
|
||||||
|
"gulp",
|
||||||
|
"gulp-imagemin",
|
||||||
|
"gulp-plumber",
|
||||||
|
"gulp-postcss",
|
||||||
|
"gulp-rename",
|
||||||
|
"gulp-sass",
|
||||||
|
"gulp-uglify-es",
|
||||||
|
]
|
||||||
|
if not use_docker:
|
||||||
|
dev_django_cmd = (
|
||||||
|
"uvicorn config.asgi:application --reload"
|
||||||
|
if use_async
|
||||||
|
else "python manage.py runserver_plus"
|
||||||
|
)
|
||||||
|
scripts.update(
|
||||||
|
{
|
||||||
|
"dev": "concurrently npm:dev:*",
|
||||||
|
"dev:webpack": "webpack serve --config webpack/dev.config.js",
|
||||||
|
"dev:django": dev_django_cmd,
|
||||||
|
}
|
||||||
|
)
|
||||||
|
else:
|
||||||
|
remove_dev_deps.append("concurrently")
|
||||||
|
update_package_json(remove_dev_deps=remove_dev_deps, scripts=scripts)
|
||||||
|
remove_gulp_files()
|
||||||
|
|
||||||
|
|
||||||
def remove_celery_files():
|
def remove_celery_files():
|
||||||
file_names = [
|
file_names = [
|
||||||
os.path.join("config", "celery_app.py"),
|
os.path.join("config", "celery_app.py"),
|
||||||
|
@ -118,11 +226,8 @@ def remove_dotgitlabciyml_file():
|
||||||
os.remove(".gitlab-ci.yml")
|
os.remove(".gitlab-ci.yml")
|
||||||
|
|
||||||
|
|
||||||
def append_to_project_gitignore(path):
|
def remove_dotgithub_folder():
|
||||||
gitignore_file_path = ".gitignore"
|
shutil.rmtree(".github")
|
||||||
with open(gitignore_file_path, "a") as gitignore_file:
|
|
||||||
gitignore_file.write(path)
|
|
||||||
gitignore_file.write(os.linesep)
|
|
||||||
|
|
||||||
|
|
||||||
def generate_random_string(
|
def generate_random_string(
|
||||||
|
@ -155,8 +260,8 @@ def set_flag(file_path, flag, value=None, formatted=None, *args, **kwargs):
|
||||||
random_string = generate_random_string(*args, **kwargs)
|
random_string = generate_random_string(*args, **kwargs)
|
||||||
if random_string is None:
|
if random_string is None:
|
||||||
print(
|
print(
|
||||||
"We couldn't find a secure pseudo-random number generator on your system. "
|
"We couldn't find a secure pseudo-random number generator on your "
|
||||||
"Please, make sure to manually {} later.".format(flag)
|
"system. Please, make sure to manually {} later.".format(flag)
|
||||||
)
|
)
|
||||||
random_string = flag
|
random_string = flag
|
||||||
if formatted is not None:
|
if formatted is not None:
|
||||||
|
@ -239,10 +344,10 @@ def set_celery_flower_password(file_path, value=None):
|
||||||
return celery_flower_password
|
return celery_flower_password
|
||||||
|
|
||||||
|
|
||||||
def append_to_gitignore_file(s):
|
def append_to_gitignore_file(ignored_line):
|
||||||
with open(".gitignore", "a") as gitignore_file:
|
with open(".gitignore", "a") as gitignore_file:
|
||||||
gitignore_file.write(s)
|
gitignore_file.write(ignored_line)
|
||||||
gitignore_file.write(os.linesep)
|
gitignore_file.write("\n")
|
||||||
|
|
||||||
|
|
||||||
def set_flags_in_envs(postgres_user, celery_flower_user, debug=False):
|
def set_flags_in_envs(postgres_user, celery_flower_user, debug=False):
|
||||||
|
@ -281,6 +386,7 @@ def set_flags_in_settings_files():
|
||||||
def remove_envs_and_associated_files():
|
def remove_envs_and_associated_files():
|
||||||
shutil.rmtree(".envs")
|
shutil.rmtree(".envs")
|
||||||
os.remove("merge_production_dotenvs_in_dotenv.py")
|
os.remove("merge_production_dotenvs_in_dotenv.py")
|
||||||
|
shutil.rmtree("tests")
|
||||||
|
|
||||||
|
|
||||||
def remove_celery_compose_dirs():
|
def remove_celery_compose_dirs():
|
||||||
|
@ -309,6 +415,11 @@ def remove_drf_starter_files():
|
||||||
"{{cookiecutter.project_slug}}", "users", "tests", "test_drf_views.py"
|
"{{cookiecutter.project_slug}}", "users", "tests", "test_drf_views.py"
|
||||||
)
|
)
|
||||||
)
|
)
|
||||||
|
os.remove(
|
||||||
|
os.path.join(
|
||||||
|
"{{cookiecutter.project_slug}}", "users", "tests", "test_swagger.py"
|
||||||
|
)
|
||||||
|
)
|
||||||
|
|
||||||
|
|
||||||
def remove_storages_module():
|
def remove_storages_module():
|
||||||
|
@ -340,12 +451,14 @@ def main():
|
||||||
|
|
||||||
if (
|
if (
|
||||||
"{{ cookiecutter.use_docker }}".lower() == "y"
|
"{{ cookiecutter.use_docker }}".lower() == "y"
|
||||||
and "{{ cookiecutter.cloud_provider}}".lower() != "aws"
|
and "{{ cookiecutter.cloud_provider}}" != "AWS"
|
||||||
):
|
):
|
||||||
remove_aws_dockerfile()
|
remove_aws_dockerfile()
|
||||||
|
|
||||||
if "{{ cookiecutter.use_heroku }}".lower() == "n":
|
if "{{ cookiecutter.use_heroku }}".lower() == "n":
|
||||||
remove_heroku_files()
|
remove_heroku_files()
|
||||||
|
elif "{{ cookiecutter.frontend_pipeline }}" != "Django Compressor":
|
||||||
|
remove_heroku_build_hooks()
|
||||||
|
|
||||||
if (
|
if (
|
||||||
"{{ cookiecutter.use_docker }}".lower() == "n"
|
"{{ cookiecutter.use_docker }}".lower() == "n"
|
||||||
|
@ -364,13 +477,21 @@ def main():
|
||||||
if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y":
|
if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y":
|
||||||
append_to_gitignore_file("!.envs/.local/")
|
append_to_gitignore_file("!.envs/.local/")
|
||||||
|
|
||||||
if "{{ cookiecutter.js_task_runner}}".lower() == "none":
|
if "{{ cookiecutter.frontend_pipeline }}" in ["None", "Django Compressor"]:
|
||||||
remove_gulp_files()
|
remove_gulp_files()
|
||||||
|
remove_webpack_files()
|
||||||
|
remove_sass_files()
|
||||||
remove_packagejson_file()
|
remove_packagejson_file()
|
||||||
if "{{ cookiecutter.use_docker }}".lower() == "y":
|
if "{{ cookiecutter.use_docker }}".lower() == "y":
|
||||||
remove_node_dockerfile()
|
remove_node_dockerfile()
|
||||||
|
else:
|
||||||
|
handle_js_runner(
|
||||||
|
"{{ cookiecutter.frontend_pipeline }}",
|
||||||
|
use_docker=("{{ cookiecutter.use_docker }}".lower() == "y"),
|
||||||
|
use_async=("{{ cookiecutter.use_async }}".lower() == "y"),
|
||||||
|
)
|
||||||
|
|
||||||
if "{{ cookiecutter.cloud_provider}}".lower() == "none":
|
if "{{ cookiecutter.cloud_provider }}" == "None":
|
||||||
print(
|
print(
|
||||||
WARNING + "You chose not to use a cloud provider, "
|
WARNING + "You chose not to use a cloud provider, "
|
||||||
"media files won't be served in production." + TERMINATOR
|
"media files won't be served in production." + TERMINATOR
|
||||||
|
@ -382,12 +503,15 @@ def main():
|
||||||
if "{{ cookiecutter.use_docker }}".lower() == "y":
|
if "{{ cookiecutter.use_docker }}".lower() == "y":
|
||||||
remove_celery_compose_dirs()
|
remove_celery_compose_dirs()
|
||||||
|
|
||||||
if "{{ cookiecutter.ci_tool }}".lower() != "travis":
|
if "{{ cookiecutter.ci_tool }}" != "Travis":
|
||||||
remove_dottravisyml_file()
|
remove_dottravisyml_file()
|
||||||
|
|
||||||
if "{{ cookiecutter.ci_tool }}".lower() != "gitlab":
|
if "{{ cookiecutter.ci_tool }}" != "Gitlab":
|
||||||
remove_dotgitlabciyml_file()
|
remove_dotgitlabciyml_file()
|
||||||
|
|
||||||
|
if "{{ cookiecutter.ci_tool }}" != "Github":
|
||||||
|
remove_dotgithub_folder()
|
||||||
|
|
||||||
if "{{ cookiecutter.use_drf }}".lower() == "n":
|
if "{{ cookiecutter.use_drf }}".lower() == "n":
|
||||||
remove_drf_starter_files()
|
remove_drf_starter_files()
|
||||||
|
|
||||||
|
|
|
@ -4,7 +4,8 @@ NOTE:
|
||||||
as the whole Cookiecutter Django project initialization
|
as the whole Cookiecutter Django project initialization
|
||||||
can potentially be run in Python 2.x environment.
|
can potentially be run in Python 2.x environment.
|
||||||
|
|
||||||
TODO: ? restrict Cookiecutter Django project initialization to Python 3.x environments only
|
TODO: restrict Cookiecutter Django project initialization
|
||||||
|
to Python 3.x environments only
|
||||||
"""
|
"""
|
||||||
from __future__ import print_function
|
from __future__ import print_function
|
||||||
|
|
||||||
|
@ -35,11 +36,11 @@ if "{{ cookiecutter.use_docker }}".lower() == "n":
|
||||||
if python_major_version == 2:
|
if python_major_version == 2:
|
||||||
print(
|
print(
|
||||||
WARNING + "You're running cookiecutter under Python 2, but the generated "
|
WARNING + "You're running cookiecutter under Python 2, but the generated "
|
||||||
"project requires Python 3.8+. Do you want to proceed (y/n)? " + TERMINATOR
|
"project requires Python 3.10+. Do you want to proceed (y/n)? " + TERMINATOR
|
||||||
)
|
)
|
||||||
yes_options, no_options = frozenset(["y"]), frozenset(["n"])
|
yes_options, no_options = frozenset(["y"]), frozenset(["n"])
|
||||||
while True:
|
while True:
|
||||||
choice = raw_input().lower()
|
choice = raw_input().lower() # noqa: F821
|
||||||
if choice in yes_options:
|
if choice in yes_options:
|
||||||
break
|
break
|
||||||
|
|
||||||
|
@ -65,18 +66,17 @@ if (
|
||||||
and "{{ cookiecutter.cloud_provider }}" == "None"
|
and "{{ cookiecutter.cloud_provider }}" == "None"
|
||||||
):
|
):
|
||||||
print(
|
print(
|
||||||
"You should either use Whitenoise or select a Cloud Provider to serve static files"
|
"You should either use Whitenoise or select a "
|
||||||
|
"Cloud Provider to serve static files"
|
||||||
)
|
)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
||||||
if (
|
if (
|
||||||
"{{ cookiecutter.cloud_provider }}" == "GCP"
|
"{{ cookiecutter.mail_service }}" == "Amazon SES"
|
||||||
and "{{ cookiecutter.mail_service }}" == "Amazon SES"
|
and "{{ cookiecutter.cloud_provider }}" != "AWS"
|
||||||
) or (
|
|
||||||
"{{ cookiecutter.cloud_provider }}" == "None"
|
|
||||||
and "{{ cookiecutter.mail_service }}" == "Amazon SES"
|
|
||||||
):
|
):
|
||||||
print(
|
print(
|
||||||
"You should either use AWS or select a different Mail Service for sending emails."
|
"You should either use AWS or select a different "
|
||||||
|
"Mail Service for sending emails."
|
||||||
)
|
)
|
||||||
sys.exit(1)
|
sys.exit(1)
|
||||||
|
|
|
@ -1,4 +1,3 @@
|
||||||
[pytest]
|
[pytest]
|
||||||
addopts = -v --tb=short
|
addopts = -v --tb=short
|
||||||
python_paths = .
|
|
||||||
norecursedirs = .tox .git */migrations/* */static/* docs venv */{{cookiecutter.project_slug}}/*
|
norecursedirs = .tox .git */migrations/* */static/* docs venv */{{cookiecutter.project_slug}}/*
|
||||||
|
|
|
@ -1,17 +1,26 @@
|
||||||
cookiecutter==1.7.2
|
cookiecutter==2.1.1
|
||||||
sh==1.13.1
|
sh==1.14.3; sys_platform != "win32"
|
||||||
binaryornot==0.4.4
|
binaryornot==0.4.4
|
||||||
|
|
||||||
# Code quality
|
# Code quality
|
||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
black==19.10b0
|
black==23.1.0
|
||||||
flake8==3.8.2
|
isort==5.12.0
|
||||||
flake8-isort==3.0.0
|
flake8==6.0.0
|
||||||
|
flake8-isort==6.0.0
|
||||||
|
pre-commit==3.0.4
|
||||||
|
|
||||||
# Testing
|
# Testing
|
||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
tox==3.15.1
|
tox==4.4.4
|
||||||
pytest==5.4.3
|
pytest==7.2.1
|
||||||
pytest-cookies==0.5.1
|
pytest-cookies==0.6.1
|
||||||
pytest-instafail==0.4.1.post0
|
pytest-instafail==0.4.2
|
||||||
pyyaml==5.3.1
|
pyyaml==6.0
|
||||||
|
|
||||||
|
# Scripting
|
||||||
|
# ------------------------------------------------------------------------------
|
||||||
|
PyGithub==1.57
|
||||||
|
gitpython==3.1.30
|
||||||
|
jinja2==3.1.2
|
||||||
|
requests==2.28.2
|
||||||
|
|
0
scripts/__init__.py
Normal file
0
scripts/__init__.py
Normal file
320
scripts/create_django_issue.py
Normal file
320
scripts/create_django_issue.py
Normal file
|
@ -0,0 +1,320 @@
|
||||||
|
"""
|
||||||
|
Creates an issue that generates a table for dependency checking whether
|
||||||
|
all packages support the latest Django version. "Latest" does not include
|
||||||
|
patches, only comparing major and minor version numbers.
|
||||||
|
|
||||||
|
This script handles when there are multiple Django versions that need
|
||||||
|
to keep up to date.
|
||||||
|
"""
|
||||||
|
from __future__ import annotations
|
||||||
|
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
import sys
|
||||||
|
from collections.abc import Iterable
|
||||||
|
from pathlib import Path
|
||||||
|
from typing import TYPE_CHECKING, Any, NamedTuple
|
||||||
|
|
||||||
|
import requests
|
||||||
|
from github import Github
|
||||||
|
|
||||||
|
if TYPE_CHECKING:
|
||||||
|
from github.Issue import Issue
|
||||||
|
|
||||||
|
CURRENT_FILE = Path(__file__)
|
||||||
|
ROOT = CURRENT_FILE.parents[1]
|
||||||
|
REQUIREMENTS_DIR = ROOT / "{{cookiecutter.project_slug}}" / "requirements"
|
||||||
|
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN", None)
|
||||||
|
GITHUB_REPO = os.getenv("GITHUB_REPOSITORY", None)
|
||||||
|
|
||||||
|
|
||||||
|
class DjVersion(NamedTuple):
|
||||||
|
"""
|
||||||
|
Wrapper to parse, compare and render Django versions.
|
||||||
|
|
||||||
|
Only keeps track on (major, minor) versions, excluding patches and pre-releases.
|
||||||
|
"""
|
||||||
|
|
||||||
|
major: int
|
||||||
|
minor: int
|
||||||
|
|
||||||
|
def __str__(self) -> str:
|
||||||
|
"""To render as string."""
|
||||||
|
return f"{self.major}.{self.minor}"
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def parse(cls, version_str: str) -> DjVersion:
|
||||||
|
"""Parse interesting values from the version string."""
|
||||||
|
major, minor, *_ = version_str.split(".")
|
||||||
|
return cls(major=int(major), minor=int(minor))
|
||||||
|
|
||||||
|
@classmethod
|
||||||
|
def parse_to_tuple(cls, version_str: str):
|
||||||
|
version = cls.parse(version_str=version_str)
|
||||||
|
return version.major, version.minor
|
||||||
|
|
||||||
|
|
||||||
|
def get_package_info(package: str) -> dict:
|
||||||
|
"""Get package metadata using PyPI API."""
|
||||||
|
# "django" converts to "Django" on redirect
|
||||||
|
r = requests.get(f"https://pypi.org/pypi/{package}/json", allow_redirects=True)
|
||||||
|
if not r.ok:
|
||||||
|
print(f"Couldn't find package: {package}")
|
||||||
|
sys.exit(1)
|
||||||
|
return r.json()
|
||||||
|
|
||||||
|
|
||||||
|
def get_django_versions() -> Iterable[DjVersion]:
|
||||||
|
"""List all django versions."""
|
||||||
|
django_package_info: dict[str, Any] = get_package_info("django")
|
||||||
|
releases = django_package_info["releases"].keys()
|
||||||
|
for release_str in releases:
|
||||||
|
if release_str.replace(".", "").isdigit():
|
||||||
|
# Exclude pre-releases with non-numeric characters in version
|
||||||
|
yield DjVersion.parse(release_str)
|
||||||
|
|
||||||
|
|
||||||
|
def get_name_and_version(requirements_line: str) -> tuple[str, ...]:
|
||||||
|
"""Get the name a version of a package from a line in the requirement file."""
|
||||||
|
full_name, version = requirements_line.split(" ", 1)[0].split("==")
|
||||||
|
name_without_extras = full_name.split("[", 1)[0]
|
||||||
|
return name_without_extras, version
|
||||||
|
|
||||||
|
|
||||||
|
def get_all_latest_django_versions(
|
||||||
|
django_max_version: tuple[DjVersion] = None,
|
||||||
|
) -> tuple[DjVersion, list[DjVersion]]:
|
||||||
|
"""
|
||||||
|
Grabs all Django versions that are worthy of a GitHub issue.
|
||||||
|
Depends on Django versions having higher major version or minor version.
|
||||||
|
"""
|
||||||
|
_django_max_version = (99, 99)
|
||||||
|
if django_max_version:
|
||||||
|
_django_max_version = django_max_version
|
||||||
|
|
||||||
|
print("Fetching all Django versions from PyPI")
|
||||||
|
base_txt = REQUIREMENTS_DIR / "base.txt"
|
||||||
|
with base_txt.open() as f:
|
||||||
|
for line in f.readlines():
|
||||||
|
if "django==" in line.lower():
|
||||||
|
break
|
||||||
|
else:
|
||||||
|
print(f"django not found in {base_txt}") # Huh...?
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
# Begin parsing and verification
|
||||||
|
_, current_version_str = get_name_and_version(line)
|
||||||
|
# Get a tuple of (major, minor) - ignoring patch version
|
||||||
|
current_minor_version = DjVersion.parse(current_version_str)
|
||||||
|
newer_versions: set[DjVersion] = set()
|
||||||
|
for django_version in get_django_versions():
|
||||||
|
if current_minor_version < django_version <= _django_max_version:
|
||||||
|
newer_versions.add(django_version)
|
||||||
|
|
||||||
|
return current_minor_version, sorted(newer_versions, reverse=True)
|
||||||
|
|
||||||
|
|
||||||
|
_TABLE_HEADER = """
|
||||||
|
|
||||||
|
## {file}.txt
|
||||||
|
|
||||||
|
| Name | Version in Master | {dj_version} Compatible Version | OK |
|
||||||
|
| ---- | :---------------: | :-----------------------------: | :-: |
|
||||||
|
"""
|
||||||
|
VITAL_BUT_UNKNOWN = [
|
||||||
|
"django-environ", # not updated often
|
||||||
|
]
|
||||||
|
|
||||||
|
|
||||||
|
class GitHubManager:
|
||||||
|
def __init__(self, base_dj_version: DjVersion, needed_dj_versions: list[DjVersion]):
|
||||||
|
self.github = Github(GITHUB_TOKEN)
|
||||||
|
self.repo = self.github.get_repo(GITHUB_REPO)
|
||||||
|
|
||||||
|
self.base_dj_version = base_dj_version
|
||||||
|
self.needed_dj_versions = needed_dj_versions
|
||||||
|
# (major+minor) Version and description
|
||||||
|
self.existing_issues: dict[DjVersion, Issue] = {}
|
||||||
|
|
||||||
|
# Load all requirements from our requirements files and preload their
|
||||||
|
# package information like a cache:
|
||||||
|
self.requirements_files = ["base", "local", "production"]
|
||||||
|
# Format:
|
||||||
|
# requirement file name: {package name: (master_version, package_info)}
|
||||||
|
self.requirements: dict[str, dict[str, tuple[str, dict]]] = {
|
||||||
|
x: {} for x in self.requirements_files
|
||||||
|
}
|
||||||
|
|
||||||
|
def setup(self) -> None:
|
||||||
|
self.load_requirements()
|
||||||
|
self.load_existing_issues()
|
||||||
|
|
||||||
|
def load_requirements(self):
|
||||||
|
print("Reading requirements")
|
||||||
|
for requirements_file in self.requirements_files:
|
||||||
|
with (REQUIREMENTS_DIR / f"{requirements_file}.txt").open() as f:
|
||||||
|
for line in f.readlines():
|
||||||
|
if (
|
||||||
|
"==" in line
|
||||||
|
and not line.startswith("{%")
|
||||||
|
and not line.startswith(" #")
|
||||||
|
and not line.startswith("#")
|
||||||
|
and not line.startswith(" ")
|
||||||
|
):
|
||||||
|
name, version = get_name_and_version(line)
|
||||||
|
self.requirements[requirements_file][name] = (
|
||||||
|
version,
|
||||||
|
get_package_info(name),
|
||||||
|
)
|
||||||
|
|
||||||
|
def load_existing_issues(self):
|
||||||
|
"""Closes the issue if the base Django version is greater than needed"""
|
||||||
|
print("Load existing issues from GitHub")
|
||||||
|
qualifiers = {
|
||||||
|
"repo": GITHUB_REPO,
|
||||||
|
"author": "app/github-actions",
|
||||||
|
"state": "open",
|
||||||
|
"is": "issue",
|
||||||
|
"in": "title",
|
||||||
|
}
|
||||||
|
issues = list(
|
||||||
|
self.github.search_issues(
|
||||||
|
"[Django Update]", "created", "desc", **qualifiers
|
||||||
|
)
|
||||||
|
)
|
||||||
|
print(f"Found {len(issues)} issues matching search")
|
||||||
|
for issue in issues:
|
||||||
|
matches = re.match(r"\[Update Django] Django (\d+.\d+)$", issue.title)
|
||||||
|
if not matches:
|
||||||
|
continue
|
||||||
|
issue_version = DjVersion.parse(matches.group(1))
|
||||||
|
if self.base_dj_version > issue_version:
|
||||||
|
issue.edit(state="closed")
|
||||||
|
print(f"Closed issue {issue.title} (ID: [{issue.id}]({issue.url}))")
|
||||||
|
else:
|
||||||
|
self.existing_issues[issue_version] = issue
|
||||||
|
|
||||||
|
def get_compatibility(
|
||||||
|
self, package_name: str, package_info: dict, needed_dj_version: DjVersion
|
||||||
|
):
|
||||||
|
"""
|
||||||
|
Verify compatibility via setup.py classifiers. If Django is not in the
|
||||||
|
classifiers, then default compatibility is n/a and OK is ✅.
|
||||||
|
|
||||||
|
If it's a package that's vital but known to not be updated often, we give it
|
||||||
|
a ❓. If a package has ❓ or 🕒, then we allow manual update. Automatic updates
|
||||||
|
only include ❌ and ✅.
|
||||||
|
"""
|
||||||
|
# If issue previously existed, find package and skip any gtg, manually
|
||||||
|
# updated packages, or known releases that will happen but haven't yet
|
||||||
|
if issue := self.existing_issues.get(needed_dj_version):
|
||||||
|
if index := issue.body.find(package_name):
|
||||||
|
name, _current, prev_compat, ok = (
|
||||||
|
s.strip() for s in issue.body[index:].split("|", 4)[:4]
|
||||||
|
)
|
||||||
|
if ok in ("✅", "❓", "🕒"):
|
||||||
|
return prev_compat, ok
|
||||||
|
|
||||||
|
if package_name in VITAL_BUT_UNKNOWN:
|
||||||
|
return "", "❓"
|
||||||
|
|
||||||
|
# Check classifiers if it includes Django
|
||||||
|
supported_dj_versions: list[DjVersion] = []
|
||||||
|
for classifier in package_info["info"]["classifiers"]:
|
||||||
|
# Usually in the form of "Framework :: Django :: 3.2"
|
||||||
|
tokens = classifier.split(" ")
|
||||||
|
if len(tokens) >= 5 and tokens[2].lower() == "django":
|
||||||
|
version = DjVersion.parse(tokens[4])
|
||||||
|
if len(version) == 2:
|
||||||
|
supported_dj_versions.append(version)
|
||||||
|
|
||||||
|
if supported_dj_versions:
|
||||||
|
if any(v >= needed_dj_version for v in supported_dj_versions):
|
||||||
|
return package_info["info"]["version"], "✅"
|
||||||
|
else:
|
||||||
|
return "", "❌"
|
||||||
|
|
||||||
|
# Django classifier DNE; assume it isn't a Django lib
|
||||||
|
# Great exceptions include pylint-django, where we need to do this manually...
|
||||||
|
return "n/a", "✅"
|
||||||
|
|
||||||
|
HOME_PAGE_URL_KEYS = [
|
||||||
|
"home_page",
|
||||||
|
"project_url",
|
||||||
|
"docs_url",
|
||||||
|
"package_url",
|
||||||
|
"release_url",
|
||||||
|
"bugtrack_url",
|
||||||
|
]
|
||||||
|
|
||||||
|
def _get_md_home_page_url(self, package_info: dict):
|
||||||
|
urls = [
|
||||||
|
package_info["info"].get(url_key) for url_key in self.HOME_PAGE_URL_KEYS
|
||||||
|
]
|
||||||
|
try:
|
||||||
|
return f"[{{}}]({next(item for item in urls if item)})"
|
||||||
|
except StopIteration:
|
||||||
|
return "{}"
|
||||||
|
|
||||||
|
def generate_markdown(self, needed_dj_version: DjVersion):
|
||||||
|
requirements = f"{needed_dj_version} requirements tables\n\n"
|
||||||
|
for _file in self.requirements_files:
|
||||||
|
requirements += _TABLE_HEADER.format_map(
|
||||||
|
{"file": _file, "dj_version": needed_dj_version}
|
||||||
|
)
|
||||||
|
for package_name, (version, info) in self.requirements[_file].items():
|
||||||
|
compat_version, icon = self.get_compatibility(
|
||||||
|
package_name, info, needed_dj_version
|
||||||
|
)
|
||||||
|
requirements += (
|
||||||
|
f"| {self._get_md_home_page_url(info).format(package_name)} "
|
||||||
|
f"| {version.strip()} "
|
||||||
|
f"| {compat_version.strip()} "
|
||||||
|
f"| {icon} "
|
||||||
|
f"|\n"
|
||||||
|
)
|
||||||
|
|
||||||
|
return requirements
|
||||||
|
|
||||||
|
def create_or_edit_issue(self, needed_dj_version: DjVersion, description: str):
|
||||||
|
if issue := self.existing_issues.get(needed_dj_version):
|
||||||
|
print(f"Editing issue #{issue.number} for Django {needed_dj_version}")
|
||||||
|
issue.edit(body=description)
|
||||||
|
else:
|
||||||
|
print(f"Creating new issue for Django {needed_dj_version}")
|
||||||
|
issue = self.repo.create_issue(
|
||||||
|
f"[Update Django] Django {needed_dj_version}", description
|
||||||
|
)
|
||||||
|
issue.add_to_labels(f"django{needed_dj_version}")
|
||||||
|
|
||||||
|
def generate(self):
|
||||||
|
for version in self.needed_dj_versions:
|
||||||
|
print(f"Handling GitHub issue for Django {version}")
|
||||||
|
md_content = self.generate_markdown(version)
|
||||||
|
print(f"Generated markdown:\n\n{md_content}")
|
||||||
|
self.create_or_edit_issue(version, md_content)
|
||||||
|
|
||||||
|
|
||||||
|
def main(django_max_version=None) -> None:
|
||||||
|
# Check if there are any djs
|
||||||
|
current_dj, latest_djs = get_all_latest_django_versions(
|
||||||
|
django_max_version=django_max_version
|
||||||
|
)
|
||||||
|
if not latest_djs:
|
||||||
|
sys.exit(0)
|
||||||
|
manager = GitHubManager(current_dj, latest_djs)
|
||||||
|
manager.setup()
|
||||||
|
manager.generate()
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
if GITHUB_REPO is None:
|
||||||
|
raise RuntimeError(
|
||||||
|
"No github repo, please set the environment variable GITHUB_REPOSITORY"
|
||||||
|
)
|
||||||
|
max_version = None
|
||||||
|
last_arg = sys.argv[-1]
|
||||||
|
if CURRENT_FILE.name not in last_arg:
|
||||||
|
max_version = DjVersion.parse_to_tuple(version_str=last_arg)
|
||||||
|
|
||||||
|
main(django_max_version=max_version)
|
164
scripts/update_changelog.py
Normal file
164
scripts/update_changelog.py
Normal file
|
@ -0,0 +1,164 @@
|
||||||
|
import datetime as dt
|
||||||
|
import os
|
||||||
|
import re
|
||||||
|
from collections.abc import Iterable
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import git
|
||||||
|
import github.PullRequest
|
||||||
|
import github.Repository
|
||||||
|
from github import Github
|
||||||
|
from jinja2 import Template
|
||||||
|
|
||||||
|
CURRENT_FILE = Path(__file__)
|
||||||
|
ROOT = CURRENT_FILE.parents[1]
|
||||||
|
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN")
|
||||||
|
GITHUB_REPO = os.getenv("GITHUB_REPOSITORY")
|
||||||
|
GIT_BRANCH = os.getenv("GITHUB_REF_NAME")
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
|
||||||
|
"""
|
||||||
|
Script entry point.
|
||||||
|
"""
|
||||||
|
# Generate changelog for PRs merged yesterday
|
||||||
|
merged_date = dt.date.today() - dt.timedelta(days=1)
|
||||||
|
repo = Github(login_or_token=GITHUB_TOKEN).get_repo(GITHUB_REPO)
|
||||||
|
merged_pulls = list(iter_pulls(repo, merged_date))
|
||||||
|
print(f"Merged pull requests: {merged_pulls}")
|
||||||
|
if not merged_pulls:
|
||||||
|
print("Nothing was merged, existing.")
|
||||||
|
return
|
||||||
|
|
||||||
|
# Group pull requests by type of change
|
||||||
|
grouped_pulls = group_pulls_by_change_type(merged_pulls)
|
||||||
|
|
||||||
|
# Generate portion of markdown
|
||||||
|
release_changes_summary = generate_md(grouped_pulls)
|
||||||
|
print(f"Summary of changes: {release_changes_summary}")
|
||||||
|
|
||||||
|
# Update CHANGELOG.md file
|
||||||
|
release = f"{merged_date:%Y.%m.%d}"
|
||||||
|
changelog_path = ROOT / "CHANGELOG.md"
|
||||||
|
write_changelog(changelog_path, release, release_changes_summary)
|
||||||
|
print(f"Wrote {changelog_path}")
|
||||||
|
|
||||||
|
# Update version
|
||||||
|
setup_py_path = ROOT / "setup.py"
|
||||||
|
update_version(setup_py_path, release)
|
||||||
|
print(f"Updated version in {setup_py_path}")
|
||||||
|
|
||||||
|
# Commit changes, create tag and push
|
||||||
|
update_git_repo([changelog_path, setup_py_path], release)
|
||||||
|
|
||||||
|
# Create GitHub release
|
||||||
|
github_release = repo.create_git_release(
|
||||||
|
tag=release,
|
||||||
|
name=release,
|
||||||
|
message=release_changes_summary,
|
||||||
|
)
|
||||||
|
print(f"Created release on GitHub {github_release}")
|
||||||
|
|
||||||
|
|
||||||
|
def iter_pulls(
|
||||||
|
repo: github.Repository.Repository,
|
||||||
|
merged_date: dt.date,
|
||||||
|
) -> Iterable[github.PullRequest.PullRequest]:
|
||||||
|
"""Fetch merged pull requests at the date we're interested in."""
|
||||||
|
recent_pulls = repo.get_pulls(
|
||||||
|
state="closed",
|
||||||
|
sort="updated",
|
||||||
|
direction="desc",
|
||||||
|
).get_page(0)
|
||||||
|
for pull in recent_pulls:
|
||||||
|
if pull.merged and pull.merged_at.date() == merged_date:
|
||||||
|
yield pull
|
||||||
|
|
||||||
|
|
||||||
|
def group_pulls_by_change_type(
|
||||||
|
pull_requests_list: list[github.PullRequest.PullRequest],
|
||||||
|
) -> dict[str, list[github.PullRequest.PullRequest]]:
|
||||||
|
"""Group pull request by change type."""
|
||||||
|
grouped_pulls = {
|
||||||
|
"Changed": [],
|
||||||
|
"Fixed": [],
|
||||||
|
"Documentation": [],
|
||||||
|
"Updated": [],
|
||||||
|
}
|
||||||
|
for pull in pull_requests_list:
|
||||||
|
label_names = {label.name for label in pull.labels}
|
||||||
|
if "project infrastructure" in label_names:
|
||||||
|
# Don't mention it in the changelog
|
||||||
|
continue
|
||||||
|
if "update" in label_names:
|
||||||
|
group_name = "Updated"
|
||||||
|
elif "bug" in label_names:
|
||||||
|
group_name = "Fixed"
|
||||||
|
elif "docs" in label_names:
|
||||||
|
group_name = "Documentation"
|
||||||
|
else:
|
||||||
|
group_name = "Changed"
|
||||||
|
grouped_pulls[group_name].append(pull)
|
||||||
|
return grouped_pulls
|
||||||
|
|
||||||
|
|
||||||
|
def generate_md(grouped_pulls: dict[str, list[github.PullRequest.PullRequest]]) -> str:
|
||||||
|
"""Generate markdown file from Jinja template."""
|
||||||
|
changelog_template = ROOT / ".github" / "changelog-template.md"
|
||||||
|
template = Template(changelog_template.read_text(), autoescape=True)
|
||||||
|
return template.render(grouped_pulls=grouped_pulls)
|
||||||
|
|
||||||
|
|
||||||
|
def write_changelog(file_path: Path, release: str, content: str) -> None:
|
||||||
|
"""Write Release details to the changelog file."""
|
||||||
|
content = f"## {release}\n{content}"
|
||||||
|
old_content = file_path.read_text()
|
||||||
|
updated_content = old_content.replace(
|
||||||
|
"<!-- GENERATOR_PLACEHOLDER -->",
|
||||||
|
f"<!-- GENERATOR_PLACEHOLDER -->\n\n{content}",
|
||||||
|
)
|
||||||
|
file_path.write_text(updated_content)
|
||||||
|
|
||||||
|
|
||||||
|
def update_version(file_path: Path, release: str) -> None:
|
||||||
|
"""Update template version in setup.py."""
|
||||||
|
old_content = file_path.read_text()
|
||||||
|
updated_content = re.sub(
|
||||||
|
r'\nversion = "\d+\.\d+\.\d+"\n',
|
||||||
|
f'\nversion = "{release}"\n',
|
||||||
|
old_content,
|
||||||
|
)
|
||||||
|
file_path.write_text(updated_content)
|
||||||
|
|
||||||
|
|
||||||
|
def update_git_repo(paths: list[Path], release: str) -> None:
|
||||||
|
"""Commit, tag changes in git repo and push to origin."""
|
||||||
|
repo = git.Repo(ROOT)
|
||||||
|
for path in paths:
|
||||||
|
repo.git.add(path)
|
||||||
|
message = f"Release {release}"
|
||||||
|
|
||||||
|
user = repo.git.config("--get", "user.name")
|
||||||
|
email = repo.git.config("--get", "user.email")
|
||||||
|
|
||||||
|
repo.git.commit(
|
||||||
|
m=message,
|
||||||
|
author=f"{user} <{email}>",
|
||||||
|
)
|
||||||
|
repo.git.tag("-a", release, m=message)
|
||||||
|
server = f"https://{GITHUB_TOKEN}@github.com/{GITHUB_REPO}.git"
|
||||||
|
print(f"Pushing changes to {GIT_BRANCH} branch of {GITHUB_REPO}")
|
||||||
|
repo.git.push(server, GIT_BRANCH)
|
||||||
|
repo.git.push("--tags", server, GIT_BRANCH)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
if GITHUB_REPO is None:
|
||||||
|
raise RuntimeError(
|
||||||
|
"No github repo, please set the environment variable GITHUB_REPOSITORY"
|
||||||
|
)
|
||||||
|
if GIT_BRANCH is None:
|
||||||
|
raise RuntimeError(
|
||||||
|
"No git branch set, please set the GITHUB_REF_NAME environment variable"
|
||||||
|
)
|
||||||
|
main()
|
112
scripts/update_contributors.py
Normal file
112
scripts/update_contributors.py
Normal file
|
@ -0,0 +1,112 @@
|
||||||
|
import json
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
from github import Github
|
||||||
|
from github.NamedUser import NamedUser
|
||||||
|
from jinja2 import Template
|
||||||
|
|
||||||
|
CURRENT_FILE = Path(__file__)
|
||||||
|
ROOT = CURRENT_FILE.parents[1]
|
||||||
|
BOT_LOGINS = ["pyup-bot"]
|
||||||
|
GITHUB_TOKEN = os.getenv("GITHUB_TOKEN", None)
|
||||||
|
GITHUB_REPO = os.getenv("GITHUB_REPOSITORY", None)
|
||||||
|
|
||||||
|
|
||||||
|
def main() -> None:
|
||||||
|
"""
|
||||||
|
Script entry point.
|
||||||
|
|
||||||
|
1. Fetch recent contributors from the Github API
|
||||||
|
2. Add missing ones to the JSON file
|
||||||
|
3. Generate Markdown from JSON file
|
||||||
|
"""
|
||||||
|
recent_authors = set(iter_recent_authors())
|
||||||
|
|
||||||
|
# Add missing users to the JSON file
|
||||||
|
contrib_file = ContributorsJSONFile()
|
||||||
|
for author in recent_authors:
|
||||||
|
print(f"Checking if {author.login} should be added")
|
||||||
|
if author.login not in contrib_file:
|
||||||
|
contrib_file.add_contributor(author)
|
||||||
|
print(f"Added {author.login} to contributors")
|
||||||
|
contrib_file.save()
|
||||||
|
|
||||||
|
# Generate MD file from JSON file
|
||||||
|
write_md_file(contrib_file.content)
|
||||||
|
|
||||||
|
|
||||||
|
def iter_recent_authors():
|
||||||
|
"""
|
||||||
|
Fetch users who opened recently merged pull requests.
|
||||||
|
|
||||||
|
Use Github API to fetch recent authors rather than
|
||||||
|
git CLI to work with Github usernames.
|
||||||
|
"""
|
||||||
|
repo = Github(login_or_token=GITHUB_TOKEN, per_page=5).get_repo(GITHUB_REPO)
|
||||||
|
recent_pulls = repo.get_pulls(
|
||||||
|
state="closed", sort="updated", direction="desc"
|
||||||
|
).get_page(0)
|
||||||
|
for pull in recent_pulls:
|
||||||
|
if (
|
||||||
|
pull.merged
|
||||||
|
and pull.user.type == "User"
|
||||||
|
and pull.user.login not in BOT_LOGINS
|
||||||
|
):
|
||||||
|
yield pull.user
|
||||||
|
|
||||||
|
|
||||||
|
class ContributorsJSONFile:
|
||||||
|
"""Helper to interact with the JSON file."""
|
||||||
|
|
||||||
|
file_path = ROOT / ".github" / "contributors.json"
|
||||||
|
content = None
|
||||||
|
|
||||||
|
def __init__(self) -> None:
|
||||||
|
"""Read initial content."""
|
||||||
|
self.content = json.loads(self.file_path.read_text())
|
||||||
|
|
||||||
|
def __contains__(self, github_login: str):
|
||||||
|
"""Provide a nice API to do: `username in file`."""
|
||||||
|
return any(
|
||||||
|
# Github usernames are case insensitive
|
||||||
|
github_login.lower() == contrib["github_login"].lower()
|
||||||
|
for contrib in self.content
|
||||||
|
)
|
||||||
|
|
||||||
|
def add_contributor(self, user: NamedUser):
|
||||||
|
"""Append the contributor data we care about at the end."""
|
||||||
|
contributor_data = {
|
||||||
|
"name": user.name or user.login,
|
||||||
|
"github_login": user.login,
|
||||||
|
"twitter_username": user.twitter_username or "",
|
||||||
|
}
|
||||||
|
self.content.append(contributor_data)
|
||||||
|
|
||||||
|
def save(self):
|
||||||
|
"""Write the file to disk with indentation."""
|
||||||
|
text_content = json.dumps(self.content, indent=2, ensure_ascii=False)
|
||||||
|
self.file_path.write_text(text_content)
|
||||||
|
|
||||||
|
|
||||||
|
def write_md_file(contributors):
|
||||||
|
"""Generate markdown file from Jinja template."""
|
||||||
|
contributors_template = ROOT / ".github" / "CONTRIBUTORS-template.md"
|
||||||
|
template = Template(contributors_template.read_text(), autoescape=True)
|
||||||
|
core_contributors = [c for c in contributors if c.get("is_core", False)]
|
||||||
|
other_contributors = (c for c in contributors if not c.get("is_core", False))
|
||||||
|
other_contributors = sorted(other_contributors, key=lambda c: c["name"].lower())
|
||||||
|
content = template.render(
|
||||||
|
core_contributors=core_contributors, other_contributors=other_contributors
|
||||||
|
)
|
||||||
|
|
||||||
|
file_path = ROOT / "CONTRIBUTORS.md"
|
||||||
|
file_path.write_text(content)
|
||||||
|
|
||||||
|
|
||||||
|
if __name__ == "__main__":
|
||||||
|
if GITHUB_REPO is None:
|
||||||
|
raise RuntimeError(
|
||||||
|
"No github repo, please set the environment variable GITHUB_REPOSITORY"
|
||||||
|
)
|
||||||
|
main()
|
7
setup.cfg
Normal file
7
setup.cfg
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
[flake8]
|
||||||
|
exclude = docs
|
||||||
|
max-line-length = 88
|
||||||
|
|
||||||
|
[isort]
|
||||||
|
profile = black
|
||||||
|
known_first_party = tests,scripts,hooks
|
25
setup.py
25
setup.py
|
@ -1,21 +1,11 @@
|
||||||
#!/usr/bin/env python
|
#!/usr/bin/env python
|
||||||
|
|
||||||
import os
|
|
||||||
import sys
|
|
||||||
|
|
||||||
try:
|
try:
|
||||||
from setuptools import setup
|
from setuptools import setup
|
||||||
except ImportError:
|
except ImportError:
|
||||||
from distutils.core import setup
|
from distutils.core import setup
|
||||||
|
|
||||||
# Our version ALWAYS matches the version of Django we support
|
# We use calendar versioning
|
||||||
# If Django has a new release, we branch, tag, then update this setting after the tag.
|
version = "2023.02.05"
|
||||||
version = "3.0.5-01"
|
|
||||||
|
|
||||||
if sys.argv[-1] == "tag":
|
|
||||||
os.system(f'git tag -a {version} -m "version {version}"')
|
|
||||||
os.system("git push --tags")
|
|
||||||
sys.exit()
|
|
||||||
|
|
||||||
with open("README.rst") as readme_file:
|
with open("README.rst") as readme_file:
|
||||||
long_description = readme_file.read()
|
long_description = readme_file.read()
|
||||||
|
@ -23,24 +13,27 @@ with open("README.rst") as readme_file:
|
||||||
setup(
|
setup(
|
||||||
name="cookiecutter-django",
|
name="cookiecutter-django",
|
||||||
version=version,
|
version=version,
|
||||||
description="A Cookiecutter template for creating production-ready Django projects quickly.",
|
description=(
|
||||||
|
"A Cookiecutter template for creating production-ready "
|
||||||
|
"Django projects quickly."
|
||||||
|
),
|
||||||
long_description=long_description,
|
long_description=long_description,
|
||||||
author="Daniel Roy Greenfeld",
|
author="Daniel Roy Greenfeld",
|
||||||
author_email="pydanny@gmail.com",
|
author_email="pydanny@gmail.com",
|
||||||
url="https://github.com/pydanny/cookiecutter-django",
|
url="https://github.com/cookiecutter/cookiecutter-django",
|
||||||
packages=[],
|
packages=[],
|
||||||
license="BSD",
|
license="BSD",
|
||||||
zip_safe=False,
|
zip_safe=False,
|
||||||
classifiers=[
|
classifiers=[
|
||||||
"Development Status :: 4 - Beta",
|
"Development Status :: 4 - Beta",
|
||||||
"Environment :: Console",
|
"Environment :: Console",
|
||||||
"Framework :: Django :: 3.0",
|
"Framework :: Django :: 4.0",
|
||||||
"Intended Audience :: Developers",
|
"Intended Audience :: Developers",
|
||||||
"Natural Language :: English",
|
"Natural Language :: English",
|
||||||
"License :: OSI Approved :: BSD License",
|
"License :: OSI Approved :: BSD License",
|
||||||
"Programming Language :: Python",
|
"Programming Language :: Python",
|
||||||
"Programming Language :: Python :: 3",
|
"Programming Language :: Python :: 3",
|
||||||
"Programming Language :: Python :: 3.8",
|
"Programming Language :: Python :: 3.10",
|
||||||
"Programming Language :: Python :: Implementation :: CPython",
|
"Programming Language :: Python :: Implementation :: CPython",
|
||||||
"Topic :: Software Development",
|
"Topic :: Software Development",
|
||||||
],
|
],
|
||||||
|
|
0
tests/__init__.py
Normal file
0
tests/__init__.py
Normal file
|
@ -1,19 +1,17 @@
|
||||||
#!/bin/sh
|
#!/bin/sh
|
||||||
# this is a very simple script that tests the docker configuration for cookiecutter-django
|
# this is a very simple script that tests the docker configuration for cookiecutter-django
|
||||||
# it is meant to be run from the root directory of the repository, eg:
|
# it is meant to be run from the root directory of the repository, eg:
|
||||||
# sh tests/test_docker.sh
|
# sh tests/test_bare.sh
|
||||||
|
|
||||||
set -o errexit
|
set -o errexit
|
||||||
|
set -x
|
||||||
# install test requirements
|
|
||||||
pip install -r requirements.txt
|
|
||||||
|
|
||||||
# create a cache directory
|
# create a cache directory
|
||||||
mkdir -p .cache/bare
|
mkdir -p .cache/bare
|
||||||
cd .cache/bare
|
cd .cache/bare
|
||||||
|
|
||||||
# create the project using the default settings in cookiecutter.json
|
# create the project using the default settings in cookiecutter.json
|
||||||
cookiecutter ../../ --no-input --overwrite-if-exists use_docker=n $@
|
cookiecutter ../../ --no-input --overwrite-if-exists use_docker=n "$@"
|
||||||
cd my_awesome_project
|
cd my_awesome_project
|
||||||
|
|
||||||
# Install OS deps
|
# Install OS deps
|
||||||
|
@ -22,5 +20,24 @@ sudo utility/install_os_dependencies.sh install
|
||||||
# Install Python deps
|
# Install Python deps
|
||||||
pip install -r requirements/local.txt
|
pip install -r requirements/local.txt
|
||||||
|
|
||||||
|
# Lint by running pre-commit on all files
|
||||||
|
# Needs a git repo to find the project root
|
||||||
|
git init
|
||||||
|
git add .
|
||||||
|
pre-commit run --show-diff-on-failure -a
|
||||||
|
|
||||||
# run the project's tests
|
# run the project's tests
|
||||||
pytest
|
pytest
|
||||||
|
|
||||||
|
# Make sure the check doesn't raise any warnings
|
||||||
|
python manage.py check --fail-level WARNING
|
||||||
|
|
||||||
|
# Run npm build script if package.json is present
|
||||||
|
if [ -f "package.json" ]
|
||||||
|
then
|
||||||
|
npm install
|
||||||
|
npm run build
|
||||||
|
fi
|
||||||
|
|
||||||
|
# Generate the HTML for the documentation
|
||||||
|
cd docs && make html
|
||||||
|
|
|
@ -1,15 +1,25 @@
|
||||||
import os
|
import os
|
||||||
import re
|
import re
|
||||||
|
import sys
|
||||||
|
|
||||||
import pytest
|
import pytest
|
||||||
from cookiecutter.exceptions import FailedHookException
|
|
||||||
import sh
|
try:
|
||||||
|
import sh
|
||||||
|
except (ImportError, ModuleNotFoundError):
|
||||||
|
sh = None # sh doesn't support Windows
|
||||||
import yaml
|
import yaml
|
||||||
from binaryornot.check import is_binary
|
from binaryornot.check import is_binary
|
||||||
|
from cookiecutter.exceptions import FailedHookException
|
||||||
|
|
||||||
PATTERN = r"{{(\s?cookiecutter)[.](.*?)}}"
|
PATTERN = r"{{(\s?cookiecutter)[.](.*?)}}"
|
||||||
RE_OBJ = re.compile(PATTERN)
|
RE_OBJ = re.compile(PATTERN)
|
||||||
|
|
||||||
|
if sys.platform.startswith("win"):
|
||||||
|
pytest.skip("sh doesn't support windows", allow_module_level=True)
|
||||||
|
elif sys.platform.startswith("darwin") and os.getenv("CI"):
|
||||||
|
pytest.skip("skipping slow macOS tests on CI", allow_module_level=True)
|
||||||
|
|
||||||
|
|
||||||
@pytest.fixture
|
@pytest.fixture
|
||||||
def context():
|
def context():
|
||||||
|
@ -37,15 +47,17 @@ SUPPORTED_COMBINATIONS = [
|
||||||
{"use_pycharm": "n"},
|
{"use_pycharm": "n"},
|
||||||
{"use_docker": "y"},
|
{"use_docker": "y"},
|
||||||
{"use_docker": "n"},
|
{"use_docker": "n"},
|
||||||
{"postgresql_version": "11.3"},
|
{"postgresql_version": "14"},
|
||||||
{"postgresql_version": "10.8"},
|
{"postgresql_version": "13"},
|
||||||
{"postgresql_version": "9.6"},
|
{"postgresql_version": "12"},
|
||||||
{"postgresql_version": "9.5"},
|
{"postgresql_version": "11"},
|
||||||
{"postgresql_version": "9.4"},
|
{"postgresql_version": "10"},
|
||||||
{"cloud_provider": "AWS", "use_whitenoise": "y"},
|
{"cloud_provider": "AWS", "use_whitenoise": "y"},
|
||||||
{"cloud_provider": "AWS", "use_whitenoise": "n"},
|
{"cloud_provider": "AWS", "use_whitenoise": "n"},
|
||||||
{"cloud_provider": "GCP", "use_whitenoise": "y"},
|
{"cloud_provider": "GCP", "use_whitenoise": "y"},
|
||||||
{"cloud_provider": "GCP", "use_whitenoise": "n"},
|
{"cloud_provider": "GCP", "use_whitenoise": "n"},
|
||||||
|
{"cloud_provider": "Azure", "use_whitenoise": "y"},
|
||||||
|
{"cloud_provider": "Azure", "use_whitenoise": "n"},
|
||||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mailgun"},
|
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mailgun"},
|
||||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mailjet"},
|
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mailjet"},
|
||||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mandrill"},
|
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mandrill"},
|
||||||
|
@ -72,17 +84,24 @@ SUPPORTED_COMBINATIONS = [
|
||||||
{"cloud_provider": "GCP", "mail_service": "SendinBlue"},
|
{"cloud_provider": "GCP", "mail_service": "SendinBlue"},
|
||||||
{"cloud_provider": "GCP", "mail_service": "SparkPost"},
|
{"cloud_provider": "GCP", "mail_service": "SparkPost"},
|
||||||
{"cloud_provider": "GCP", "mail_service": "Other SMTP"},
|
{"cloud_provider": "GCP", "mail_service": "Other SMTP"},
|
||||||
# Note: cloud_providers GCP and None with mail_service Amazon SES is not supported
|
{"cloud_provider": "Azure", "mail_service": "Mailgun"},
|
||||||
|
{"cloud_provider": "Azure", "mail_service": "Mailjet"},
|
||||||
|
{"cloud_provider": "Azure", "mail_service": "Mandrill"},
|
||||||
|
{"cloud_provider": "Azure", "mail_service": "Postmark"},
|
||||||
|
{"cloud_provider": "Azure", "mail_service": "Sendgrid"},
|
||||||
|
{"cloud_provider": "Azure", "mail_service": "SendinBlue"},
|
||||||
|
{"cloud_provider": "Azure", "mail_service": "SparkPost"},
|
||||||
|
{"cloud_provider": "Azure", "mail_service": "Other SMTP"},
|
||||||
|
# Note: cloud_providers GCP, Azure, and None
|
||||||
|
# with mail_service Amazon SES is not supported
|
||||||
{"use_async": "y"},
|
{"use_async": "y"},
|
||||||
{"use_async": "n"},
|
{"use_async": "n"},
|
||||||
{"use_drf": "y"},
|
{"use_drf": "y"},
|
||||||
{"use_drf": "n"},
|
{"use_drf": "n"},
|
||||||
{"js_task_runner": "None"},
|
{"frontend_pipeline": "None"},
|
||||||
{"js_task_runner": "Gulp"},
|
{"frontend_pipeline": "Django Compressor"},
|
||||||
{"custom_bootstrap_compilation": "y"},
|
{"frontend_pipeline": "Gulp"},
|
||||||
{"custom_bootstrap_compilation": "n"},
|
{"frontend_pipeline": "Webpack"},
|
||||||
{"use_compressor": "y"},
|
|
||||||
{"use_compressor": "n"},
|
|
||||||
{"use_celery": "y"},
|
{"use_celery": "y"},
|
||||||
{"use_celery": "n"},
|
{"use_celery": "n"},
|
||||||
{"use_mailhog": "y"},
|
{"use_mailhog": "y"},
|
||||||
|
@ -96,6 +115,7 @@ SUPPORTED_COMBINATIONS = [
|
||||||
{"ci_tool": "None"},
|
{"ci_tool": "None"},
|
||||||
{"ci_tool": "Travis"},
|
{"ci_tool": "Travis"},
|
||||||
{"ci_tool": "Gitlab"},
|
{"ci_tool": "Gitlab"},
|
||||||
|
{"ci_tool": "Github"},
|
||||||
{"keep_local_envs_in_vcs": "y"},
|
{"keep_local_envs_in_vcs": "y"},
|
||||||
{"keep_local_envs_in_vcs": "n"},
|
{"keep_local_envs_in_vcs": "n"},
|
||||||
{"debug": "y"},
|
{"debug": "y"},
|
||||||
|
@ -105,20 +125,21 @@ SUPPORTED_COMBINATIONS = [
|
||||||
UNSUPPORTED_COMBINATIONS = [
|
UNSUPPORTED_COMBINATIONS = [
|
||||||
{"cloud_provider": "None", "use_whitenoise": "n"},
|
{"cloud_provider": "None", "use_whitenoise": "n"},
|
||||||
{"cloud_provider": "GCP", "mail_service": "Amazon SES"},
|
{"cloud_provider": "GCP", "mail_service": "Amazon SES"},
|
||||||
|
{"cloud_provider": "Azure", "mail_service": "Amazon SES"},
|
||||||
{"cloud_provider": "None", "mail_service": "Amazon SES"},
|
{"cloud_provider": "None", "mail_service": "Amazon SES"},
|
||||||
]
|
]
|
||||||
|
|
||||||
|
|
||||||
def _fixture_id(ctx):
|
def _fixture_id(ctx):
|
||||||
"""Helper to get a user friendly test name from the parametrized context."""
|
"""Helper to get a user-friendly test name from the parametrized context."""
|
||||||
return "-".join(f"{key}:{value}" for key, value in ctx.items())
|
return "-".join(f"{key}:{value}" for key, value in ctx.items())
|
||||||
|
|
||||||
|
|
||||||
def build_files_list(root_dir):
|
def build_files_list(base_dir):
|
||||||
"""Build a list containing absolute paths to the generated files."""
|
"""Build a list containing absolute paths to the generated files."""
|
||||||
return [
|
return [
|
||||||
os.path.join(dirpath, file_path)
|
os.path.join(dirpath, file_path)
|
||||||
for dirpath, subdirs, files in os.walk(root_dir)
|
for dirpath, subdirs, files in os.walk(base_dir)
|
||||||
for file_path in files
|
for file_path in files
|
||||||
]
|
]
|
||||||
|
|
||||||
|
@ -130,22 +151,22 @@ def check_paths(paths):
|
||||||
if is_binary(path):
|
if is_binary(path):
|
||||||
continue
|
continue
|
||||||
|
|
||||||
for line in open(path, "r"):
|
for line in open(path):
|
||||||
match = RE_OBJ.search(line)
|
match = RE_OBJ.search(line)
|
||||||
msg = "cookiecutter variable not replaced in {}"
|
assert match is None, f"cookiecutter variable not replaced in {path}"
|
||||||
assert match is None, msg.format(path)
|
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
|
@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
|
||||||
def test_project_generation(cookies, context, context_override):
|
def test_project_generation(cookies, context, context_override):
|
||||||
"""Test that project is generated and fully rendered."""
|
"""Test that project is generated and fully rendered."""
|
||||||
|
|
||||||
result = cookies.bake(extra_context={**context, **context_override})
|
result = cookies.bake(extra_context={**context, **context_override})
|
||||||
assert result.exit_code == 0
|
assert result.exit_code == 0
|
||||||
assert result.exception is None
|
assert result.exception is None
|
||||||
assert result.project.basename == context["project_slug"]
|
assert result.project_path.name == context["project_slug"]
|
||||||
assert result.project.isdir()
|
assert result.project_path.is_dir()
|
||||||
|
|
||||||
paths = build_files_list(str(result.project))
|
paths = build_files_list(str(result.project_path))
|
||||||
assert paths
|
assert paths
|
||||||
check_paths(paths)
|
check_paths(paths)
|
||||||
|
|
||||||
|
@ -156,7 +177,7 @@ def test_flake8_passes(cookies, context_override):
|
||||||
result = cookies.bake(extra_context=context_override)
|
result = cookies.bake(extra_context=context_override)
|
||||||
|
|
||||||
try:
|
try:
|
||||||
sh.flake8(_cwd=str(result.project))
|
sh.flake8(_cwd=str(result.project_path))
|
||||||
except sh.ErrorReturnCode as e:
|
except sh.ErrorReturnCode as e:
|
||||||
pytest.fail(e.stdout.decode())
|
pytest.fail(e.stdout.decode())
|
||||||
|
|
||||||
|
@ -168,7 +189,12 @@ def test_black_passes(cookies, context_override):
|
||||||
|
|
||||||
try:
|
try:
|
||||||
sh.black(
|
sh.black(
|
||||||
"--check", "--diff", "--exclude", "migrations", _cwd=str(result.project)
|
"--check",
|
||||||
|
"--diff",
|
||||||
|
"--exclude",
|
||||||
|
"migrations",
|
||||||
|
".",
|
||||||
|
_cwd=str(result.project_path),
|
||||||
)
|
)
|
||||||
except sh.ErrorReturnCode as e:
|
except sh.ErrorReturnCode as e:
|
||||||
pytest.fail(e.stdout.decode())
|
pytest.fail(e.stdout.decode())
|
||||||
|
@ -176,7 +202,10 @@ def test_black_passes(cookies, context_override):
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
["use_docker", "expected_test_script"],
|
["use_docker", "expected_test_script"],
|
||||||
[("n", "pytest"), ("y", "docker-compose -f local.yml run django pytest"),],
|
[
|
||||||
|
("n", "pytest"),
|
||||||
|
("y", "docker-compose -f local.yml run django pytest"),
|
||||||
|
],
|
||||||
)
|
)
|
||||||
def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_script):
|
def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_script):
|
||||||
context.update({"ci_tool": "Travis", "use_docker": use_docker})
|
context.update({"ci_tool": "Travis", "use_docker": use_docker})
|
||||||
|
@ -184,12 +213,12 @@ def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_scrip
|
||||||
|
|
||||||
assert result.exit_code == 0
|
assert result.exit_code == 0
|
||||||
assert result.exception is None
|
assert result.exception is None
|
||||||
assert result.project.basename == context["project_slug"]
|
assert result.project_path.name == context["project_slug"]
|
||||||
assert result.project.isdir()
|
assert result.project_path.is_dir()
|
||||||
|
|
||||||
with open(f"{result.project}/.travis.yml", "r") as travis_yml:
|
with open(f"{result.project_path}/.travis.yml") as travis_yml:
|
||||||
try:
|
try:
|
||||||
yml = yaml.load(travis_yml, Loader=yaml.FullLoader)["jobs"]["include"]
|
yml = yaml.safe_load(travis_yml)["jobs"]["include"]
|
||||||
assert yml[0]["script"] == ["flake8"]
|
assert yml[0]["script"] == ["flake8"]
|
||||||
assert yml[1]["script"] == [expected_test_script]
|
assert yml[1]["script"] == [expected_test_script]
|
||||||
except yaml.YAMLError as e:
|
except yaml.YAMLError as e:
|
||||||
|
@ -198,7 +227,10 @@ def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_scrip
|
||||||
|
|
||||||
@pytest.mark.parametrize(
|
@pytest.mark.parametrize(
|
||||||
["use_docker", "expected_test_script"],
|
["use_docker", "expected_test_script"],
|
||||||
[("n", "pytest"), ("y", "docker-compose -f local.yml run django pytest"),],
|
[
|
||||||
|
("n", "pytest"),
|
||||||
|
("y", "docker-compose -f local.yml run django pytest"),
|
||||||
|
],
|
||||||
)
|
)
|
||||||
def test_gitlab_invokes_flake8_and_pytest(
|
def test_gitlab_invokes_flake8_and_pytest(
|
||||||
cookies, context, use_docker, expected_test_script
|
cookies, context, use_docker, expected_test_script
|
||||||
|
@ -208,21 +240,57 @@ def test_gitlab_invokes_flake8_and_pytest(
|
||||||
|
|
||||||
assert result.exit_code == 0
|
assert result.exit_code == 0
|
||||||
assert result.exception is None
|
assert result.exception is None
|
||||||
assert result.project.basename == context["project_slug"]
|
assert result.project_path.name == context["project_slug"]
|
||||||
assert result.project.isdir()
|
assert result.project_path.is_dir()
|
||||||
|
|
||||||
with open(f"{result.project}/.gitlab-ci.yml", "r") as gitlab_yml:
|
with open(f"{result.project_path}/.gitlab-ci.yml") as gitlab_yml:
|
||||||
try:
|
try:
|
||||||
gitlab_config = yaml.load(gitlab_yml, Loader=yaml.FullLoader)
|
gitlab_config = yaml.safe_load(gitlab_yml)
|
||||||
assert gitlab_config["flake8"]["script"] == ["flake8"]
|
assert gitlab_config["flake8"]["script"] == ["flake8"]
|
||||||
assert gitlab_config["pytest"]["script"] == [expected_test_script]
|
assert gitlab_config["pytest"]["script"] == [expected_test_script]
|
||||||
except yaml.YAMLError as e:
|
except yaml.YAMLError as e:
|
||||||
pytest.fail(e)
|
pytest.fail(e)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
["use_docker", "expected_test_script"],
|
||||||
|
[
|
||||||
|
("n", "pytest"),
|
||||||
|
("y", "docker-compose -f local.yml run django pytest"),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_github_invokes_linter_and_pytest(
|
||||||
|
cookies, context, use_docker, expected_test_script
|
||||||
|
):
|
||||||
|
context.update({"ci_tool": "Github", "use_docker": use_docker})
|
||||||
|
result = cookies.bake(extra_context=context)
|
||||||
|
|
||||||
|
assert result.exit_code == 0
|
||||||
|
assert result.exception is None
|
||||||
|
assert result.project_path.name == context["project_slug"]
|
||||||
|
assert result.project_path.is_dir()
|
||||||
|
|
||||||
|
with open(f"{result.project_path}/.github/workflows/ci.yml") as github_yml:
|
||||||
|
try:
|
||||||
|
github_config = yaml.safe_load(github_yml)
|
||||||
|
linter_present = False
|
||||||
|
for action_step in github_config["jobs"]["linter"]["steps"]:
|
||||||
|
if action_step.get("uses", "NA").startswith("pre-commit"):
|
||||||
|
linter_present = True
|
||||||
|
assert linter_present
|
||||||
|
|
||||||
|
expected_test_script_present = False
|
||||||
|
for action_step in github_config["jobs"]["pytest"]["steps"]:
|
||||||
|
if action_step.get("run") == expected_test_script:
|
||||||
|
expected_test_script_present = True
|
||||||
|
assert expected_test_script_present
|
||||||
|
except yaml.YAMLError as e:
|
||||||
|
pytest.fail(e)
|
||||||
|
|
||||||
|
|
||||||
@pytest.mark.parametrize("slug", ["project slug", "Project_Slug"])
|
@pytest.mark.parametrize("slug", ["project slug", "Project_Slug"])
|
||||||
def test_invalid_slug(cookies, context, slug):
|
def test_invalid_slug(cookies, context, slug):
|
||||||
"""Invalid slug should failed pre-generation hook."""
|
"""Invalid slug should fail pre-generation hook."""
|
||||||
context.update({"project_slug": slug})
|
context.update({"project_slug": slug})
|
||||||
|
|
||||||
result = cookies.bake(extra_context=context)
|
result = cookies.bake(extra_context=context)
|
||||||
|
@ -239,3 +307,20 @@ def test_error_if_incompatible(cookies, context, invalid_context):
|
||||||
|
|
||||||
assert result.exit_code != 0
|
assert result.exit_code != 0
|
||||||
assert isinstance(result.exception, FailedHookException)
|
assert isinstance(result.exception, FailedHookException)
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.mark.parametrize(
|
||||||
|
["use_pycharm", "pycharm_docs_exist"],
|
||||||
|
[
|
||||||
|
("n", False),
|
||||||
|
("y", True),
|
||||||
|
],
|
||||||
|
)
|
||||||
|
def test_pycharm_docs_removed(cookies, context, use_pycharm, pycharm_docs_exist):
|
||||||
|
"""."""
|
||||||
|
context.update({"use_pycharm": use_pycharm})
|
||||||
|
result = cookies.bake(extra_context=context)
|
||||||
|
|
||||||
|
with open(f"{result.project_path}/docs/index.rst") as f:
|
||||||
|
has_pycharm_docs = "pycharm/configuration" in f.read()
|
||||||
|
assert has_pycharm_docs is pycharm_docs_exist
|
||||||
|
|
|
@ -4,24 +4,29 @@
|
||||||
# sh tests/test_docker.sh
|
# sh tests/test_docker.sh
|
||||||
|
|
||||||
set -o errexit
|
set -o errexit
|
||||||
|
set -x
|
||||||
# install test requirements
|
|
||||||
pip install -r requirements.txt
|
|
||||||
|
|
||||||
# create a cache directory
|
# create a cache directory
|
||||||
mkdir -p .cache/docker
|
mkdir -p .cache/docker
|
||||||
cd .cache/docker
|
cd .cache/docker
|
||||||
|
|
||||||
# create the project using the default settings in cookiecutter.json
|
# create the project using the default settings in cookiecutter.json
|
||||||
cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y $@
|
cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y "$@"
|
||||||
cd my_awesome_project
|
cd my_awesome_project
|
||||||
|
|
||||||
|
# Lint by running pre-commit on all files
|
||||||
|
# Needs a git repo to find the project root
|
||||||
|
# We don't have git inside Docker, so run it outside
|
||||||
|
git init
|
||||||
|
git add .
|
||||||
|
pre-commit run --show-diff-on-failure -a
|
||||||
|
|
||||||
|
# make sure all images build
|
||||||
|
docker-compose -f local.yml build
|
||||||
|
|
||||||
# run the project's type checks
|
# run the project's type checks
|
||||||
docker-compose -f local.yml run django mypy my_awesome_project
|
docker-compose -f local.yml run django mypy my_awesome_project
|
||||||
|
|
||||||
# Run black with --check option
|
|
||||||
docker-compose -f local.yml run django black --check --diff --exclude 'migrations' ./
|
|
||||||
|
|
||||||
# run the project's tests
|
# run the project's tests
|
||||||
docker-compose -f local.yml run django pytest
|
docker-compose -f local.yml run django pytest
|
||||||
|
|
||||||
|
@ -29,4 +34,16 @@ docker-compose -f local.yml run django pytest
|
||||||
docker-compose -f local.yml run django python manage.py makemigrations --dry-run --check || { echo "ERROR: there were changes in the models, but migration listed above have not been created and are not saved in version control"; exit 1; }
|
docker-compose -f local.yml run django python manage.py makemigrations --dry-run --check || { echo "ERROR: there were changes in the models, but migration listed above have not been created and are not saved in version control"; exit 1; }
|
||||||
|
|
||||||
# Test support for translations
|
# Test support for translations
|
||||||
docker-compose -f local.yml run django python manage.py makemessages
|
docker-compose -f local.yml run django python manage.py makemessages --all
|
||||||
|
|
||||||
|
# Make sure the check doesn't raise any warnings
|
||||||
|
docker-compose -f local.yml run django python manage.py check --fail-level WARNING
|
||||||
|
|
||||||
|
# Generate the HTML for the documentation
|
||||||
|
docker-compose -f local.yml run docs make html
|
||||||
|
|
||||||
|
# Run npm build script if package.json is present
|
||||||
|
if [ -f "package.json" ]
|
||||||
|
then
|
||||||
|
docker-compose -f local.yml run node npm run build
|
||||||
|
fi
|
||||||
|
|
28
tests/test_hooks.py
Normal file
28
tests/test_hooks.py
Normal file
|
@ -0,0 +1,28 @@
|
||||||
|
"""Unit tests for the hooks"""
|
||||||
|
import os
|
||||||
|
from pathlib import Path
|
||||||
|
|
||||||
|
import pytest
|
||||||
|
|
||||||
|
from hooks.post_gen_project import append_to_gitignore_file
|
||||||
|
|
||||||
|
|
||||||
|
@pytest.fixture()
|
||||||
|
def working_directory(tmp_path):
|
||||||
|
prev_cwd = Path.cwd()
|
||||||
|
os.chdir(tmp_path)
|
||||||
|
try:
|
||||||
|
yield tmp_path
|
||||||
|
finally:
|
||||||
|
os.chdir(prev_cwd)
|
||||||
|
|
||||||
|
|
||||||
|
def test_append_to_gitignore_file(working_directory):
|
||||||
|
gitignore_file = working_directory / ".gitignore"
|
||||||
|
gitignore_file.write_text("node_modules/\n")
|
||||||
|
append_to_gitignore_file(".envs/*")
|
||||||
|
linesep = os.linesep.encode()
|
||||||
|
assert (
|
||||||
|
gitignore_file.read_bytes() == b"node_modules/" + linesep + b".envs/*" + linesep
|
||||||
|
)
|
||||||
|
assert gitignore_file.read_text() == "node_modules/\n.envs/*\n"
|
4
tox.ini
4
tox.ini
|
@ -1,6 +1,6 @@
|
||||||
[tox]
|
[tox]
|
||||||
skipsdist = true
|
skipsdist = true
|
||||||
envlist = py38,black-template
|
envlist = py310,black-template
|
||||||
|
|
||||||
[testenv]
|
[testenv]
|
||||||
deps = -rrequirements.txt
|
deps = -rrequirements.txt
|
||||||
|
@ -8,4 +8,4 @@ commands = pytest {posargs:./tests}
|
||||||
|
|
||||||
[testenv:black-template]
|
[testenv:black-template]
|
||||||
deps = black
|
deps = black
|
||||||
commands = black --check hooks tests setup.py docs
|
commands = black --check hooks tests setup.py docs scripts
|
||||||
|
|
|
@ -1,4 +1,11 @@
|
||||||
.*
|
.editorconfig
|
||||||
!.coveragerc
|
.gitattributes
|
||||||
!.env
|
.github
|
||||||
!.pylintrc
|
.gitignore
|
||||||
|
.gitlab-ci.yml
|
||||||
|
.idea
|
||||||
|
.pre-commit-config.yaml
|
||||||
|
.readthedocs.yml
|
||||||
|
.travis.yml
|
||||||
|
venv
|
||||||
|
.git
|
||||||
|
|
|
@ -12,19 +12,7 @@ trim_trailing_whitespace = true
|
||||||
indent_style = space
|
indent_style = space
|
||||||
indent_size = 4
|
indent_size = 4
|
||||||
|
|
||||||
[*.py]
|
[*.{html,css,scss,json,yml,xml}]
|
||||||
line_length = 88
|
|
||||||
known_first_party = {{cookiecutter.project_slug}},config
|
|
||||||
multi_line_output = 3
|
|
||||||
default_section = THIRDPARTY
|
|
||||||
recursive = true
|
|
||||||
skip = venv/
|
|
||||||
skip_glob = **/migrations/*.py
|
|
||||||
include_trailing_comma = true
|
|
||||||
force_grid_wrap = 0
|
|
||||||
use_parentheses = true
|
|
||||||
|
|
||||||
[*.{html,css,scss,json,yml}]
|
|
||||||
indent_style = space
|
indent_style = space
|
||||||
indent_size = 2
|
indent_size = 2
|
||||||
|
|
||||||
|
|
|
@ -14,4 +14,4 @@ REDIS_URL=redis://redis:6379/0
|
||||||
# Flower
|
# Flower
|
||||||
CELERY_FLOWER_USER=!!!SET CELERY_FLOWER_USER!!!
|
CELERY_FLOWER_USER=!!!SET CELERY_FLOWER_USER!!!
|
||||||
CELERY_FLOWER_PASSWORD=!!!SET CELERY_FLOWER_PASSWORD!!!
|
CELERY_FLOWER_PASSWORD=!!!SET CELERY_FLOWER_PASSWORD!!!
|
||||||
{% endif %}
|
{%- endif %}
|
||||||
|
|
|
@ -44,6 +44,12 @@ DJANGO_AWS_STORAGE_BUCKET_NAME=
|
||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
GOOGLE_APPLICATION_CREDENTIALS=
|
GOOGLE_APPLICATION_CREDENTIALS=
|
||||||
DJANGO_GCP_STORAGE_BUCKET_NAME=
|
DJANGO_GCP_STORAGE_BUCKET_NAME=
|
||||||
|
{% elif cookiecutter.cloud_provider == 'Azure' %}
|
||||||
|
# Azure
|
||||||
|
# ------------------------------------------------------------------------------
|
||||||
|
DJANGO_AZURE_ACCOUNT_KEY=
|
||||||
|
DJANGO_AZURE_ACCOUNT_NAME=
|
||||||
|
DJANGO_AZURE_CONTAINER_NAME=
|
||||||
{% endif %}
|
{% endif %}
|
||||||
# django-allauth
|
# django-allauth
|
||||||
# ------------------------------------------------------------------------------
|
# ------------------------------------------------------------------------------
|
||||||
|
|
95
{{cookiecutter.project_slug}}/.github/dependabot.yml
vendored
Normal file
95
{{cookiecutter.project_slug}}/.github/dependabot.yml
vendored
Normal file
|
@ -0,0 +1,95 @@
|
||||||
|
# Config for Dependabot updates. See Documentation here:
|
||||||
|
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
|
||||||
|
|
||||||
|
version: 2
|
||||||
|
updates:
|
||||||
|
# Update GitHub actions in workflows
|
||||||
|
- package-ecosystem: "github-actions"
|
||||||
|
directory: "/"
|
||||||
|
# Check for updates to GitHub Actions every weekday
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
|
||||||
|
{%- if cookiecutter.use_docker == 'y' %}
|
||||||
|
|
||||||
|
# Enable version updates for Docker
|
||||||
|
# We need to specify each Dockerfile in a separate entry because Dependabot doesn't
|
||||||
|
# support wildcards or recursively checking subdirectories. Check this issue for updates:
|
||||||
|
# https://github.com/dependabot/dependabot-core/issues/2178
|
||||||
|
- package-ecosystem: "docker"
|
||||||
|
# Look for a `Dockerfile` in the `compose/local/django` directory
|
||||||
|
directory: "compose/local/django/"
|
||||||
|
# Check for updates to GitHub Actions every weekday
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
|
||||||
|
# Enable version updates for Docker
|
||||||
|
- package-ecosystem: "docker"
|
||||||
|
# Look for a `Dockerfile` in the `compose/local/docs` directory
|
||||||
|
directory: "compose/local/docs/"
|
||||||
|
# Check for updates to GitHub Actions every weekday
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
|
||||||
|
# Enable version updates for Docker
|
||||||
|
- package-ecosystem: "docker"
|
||||||
|
# Look for a `Dockerfile` in the `compose/local/node` directory
|
||||||
|
directory: "compose/local/node/"
|
||||||
|
# Check for updates to GitHub Actions every weekday
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
|
||||||
|
# Enable version updates for Docker
|
||||||
|
- package-ecosystem: "docker"
|
||||||
|
# Look for a `Dockerfile` in the `compose/production/aws` directory
|
||||||
|
directory: "compose/production/aws/"
|
||||||
|
# Check for updates to GitHub Actions every weekday
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
|
||||||
|
# Enable version updates for Docker
|
||||||
|
- package-ecosystem: "docker"
|
||||||
|
# Look for a `Dockerfile` in the `compose/production/django` directory
|
||||||
|
directory: "compose/production/django/"
|
||||||
|
# Check for updates to GitHub Actions every weekday
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
|
||||||
|
# Enable version updates for Docker
|
||||||
|
- package-ecosystem: "docker"
|
||||||
|
# Look for a `Dockerfile` in the `compose/production/postgres` directory
|
||||||
|
directory: "compose/production/postgres/"
|
||||||
|
# Check for updates to GitHub Actions every weekday
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
|
||||||
|
# Enable version updates for Docker
|
||||||
|
- package-ecosystem: "docker"
|
||||||
|
# Look for a `Dockerfile` in the `compose/production/traefik` directory
|
||||||
|
directory: "compose/production/traefik/"
|
||||||
|
# Check for updates to GitHub Actions every weekday
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
|
# Enable version updates for Python/Pip - Production
|
||||||
|
- package-ecosystem: "pip"
|
||||||
|
# Look for a `requirements.txt` in the `root` directory
|
||||||
|
# also 'setup.cfg', 'runtime.txt' and 'requirements/*.txt'
|
||||||
|
directory: "/"
|
||||||
|
# Check for updates to GitHub Actions every weekday
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
|
||||||
|
{%- if cookiecutter.frontend_pipeline == 'Gulp' %}
|
||||||
|
|
||||||
|
# Enable version updates for javascript/npm
|
||||||
|
- package-ecosystem: "npm"
|
||||||
|
# Look for a `packages.json' in the `root` directory
|
||||||
|
directory: "/"
|
||||||
|
# Check for updates to GitHub Actions every weekday
|
||||||
|
schedule:
|
||||||
|
interval: "daily"
|
||||||
|
|
||||||
|
{%- endif %}
|
103
{{cookiecutter.project_slug}}/.github/workflows/ci.yml
vendored
Normal file
103
{{cookiecutter.project_slug}}/.github/workflows/ci.yml
vendored
Normal file
|
@ -0,0 +1,103 @@
|
||||||
|
name: CI
|
||||||
|
|
||||||
|
# Enable Buildkit and let compose use it to speed up image building
|
||||||
|
env:
|
||||||
|
DOCKER_BUILDKIT: 1
|
||||||
|
COMPOSE_DOCKER_CLI_BUILD: 1
|
||||||
|
|
||||||
|
on:
|
||||||
|
pull_request:
|
||||||
|
branches: [ "master", "main" ]
|
||||||
|
paths-ignore: [ "docs/**" ]
|
||||||
|
|
||||||
|
push:
|
||||||
|
branches: [ "master", "main" ]
|
||||||
|
paths-ignore: [ "docs/**" ]
|
||||||
|
|
||||||
|
concurrency:
|
||||||
|
group: {% raw %}${{ github.head_ref || github.run_id }}{% endraw %}
|
||||||
|
cancel-in-progress: true
|
||||||
|
|
||||||
|
jobs:
|
||||||
|
linter:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
steps:
|
||||||
|
|
||||||
|
- name: Checkout Code Repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v3
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
cache: pip
|
||||||
|
cache-dependency-path: |
|
||||||
|
requirements/base.txt
|
||||||
|
requirements/local.txt
|
||||||
|
|
||||||
|
- name: Run pre-commit
|
||||||
|
uses: pre-commit/action@v2.0.3
|
||||||
|
|
||||||
|
# With no caching at all the entire ci process takes 4m 30s to complete!
|
||||||
|
pytest:
|
||||||
|
runs-on: ubuntu-latest
|
||||||
|
{%- if cookiecutter.use_docker == 'n' %}
|
||||||
|
|
||||||
|
services:
|
||||||
|
{%- if cookiecutter.use_celery == 'y' %}
|
||||||
|
redis:
|
||||||
|
image: redis:6
|
||||||
|
ports:
|
||||||
|
- 6379:6379
|
||||||
|
{%- endif %}
|
||||||
|
postgres:
|
||||||
|
image: postgres:12
|
||||||
|
ports:
|
||||||
|
- 5432:5432
|
||||||
|
env:
|
||||||
|
POSTGRES_PASSWORD: postgres
|
||||||
|
|
||||||
|
env:
|
||||||
|
{%- if cookiecutter.use_celery == 'y' %}
|
||||||
|
CELERY_BROKER_URL: "redis://localhost:6379/0"
|
||||||
|
{%- endif %}
|
||||||
|
# postgres://user:password@host:port/database
|
||||||
|
DATABASE_URL: "postgres://postgres:postgres@localhost:5432/postgres"
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
|
steps:
|
||||||
|
|
||||||
|
- name: Checkout Code Repository
|
||||||
|
uses: actions/checkout@v3
|
||||||
|
{%- if cookiecutter.use_docker == 'y' %}
|
||||||
|
|
||||||
|
- name: Build the Stack
|
||||||
|
run: docker-compose -f local.yml build
|
||||||
|
|
||||||
|
- name: Run DB Migrations
|
||||||
|
run: docker-compose -f local.yml run --rm django python manage.py migrate
|
||||||
|
|
||||||
|
- name: Run Django Tests
|
||||||
|
run: docker-compose -f local.yml run django pytest
|
||||||
|
|
||||||
|
- name: Tear down the Stack
|
||||||
|
run: docker-compose -f local.yml down
|
||||||
|
{%- else %}
|
||||||
|
|
||||||
|
- name: Set up Python
|
||||||
|
uses: actions/setup-python@v3
|
||||||
|
with:
|
||||||
|
python-version: "3.10"
|
||||||
|
cache: pip
|
||||||
|
cache-dependency-path: |
|
||||||
|
requirements/base.txt
|
||||||
|
requirements/local.txt
|
||||||
|
|
||||||
|
- name: Install Dependencies
|
||||||
|
run: |
|
||||||
|
python -m pip install --upgrade pip
|
||||||
|
pip install -r requirements/local.txt
|
||||||
|
|
||||||
|
- name: Test with pytest
|
||||||
|
run: pytest
|
||||||
|
{%- endif %}
|
34
{{cookiecutter.project_slug}}/.gitignore
vendored
34
{{cookiecutter.project_slug}}/.gitignore
vendored
|
@ -159,6 +159,10 @@ typings/
|
||||||
!.vscode/tasks.json
|
!.vscode/tasks.json
|
||||||
!.vscode/launch.json
|
!.vscode/launch.json
|
||||||
!.vscode/extensions.json
|
!.vscode/extensions.json
|
||||||
|
*.code-workspace
|
||||||
|
|
||||||
|
# Local History for Visual Studio Code
|
||||||
|
.history/
|
||||||
|
|
||||||
|
|
||||||
{% if cookiecutter.use_pycharm == 'y' -%}
|
{% if cookiecutter.use_pycharm == 'y' -%}
|
||||||
|
@ -321,28 +325,30 @@ Session.vim
|
||||||
|
|
||||||
# Auto-generated tag files
|
# Auto-generated tag files
|
||||||
tags
|
tags
|
||||||
{% if cookiecutter.use_docker == 'n' %}
|
|
||||||
|
|
||||||
### VirtualEnv template
|
# Redis dump file
|
||||||
# Virtualenv
|
dump.rdb
|
||||||
[Bb]in
|
|
||||||
[Ii]nclude
|
|
||||||
[Ll]ib
|
|
||||||
[Ll]ib64
|
|
||||||
[Ss]cripts
|
|
||||||
pyvenv.cfg
|
|
||||||
pip-selfcheck.json
|
|
||||||
.env
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
### Project template
|
### Project template
|
||||||
{% if cookiecutter.use_mailhog == 'y' and cookiecutter.use_docker == 'n' %}
|
{%- if cookiecutter.use_mailhog == 'y' and cookiecutter.use_docker == 'n' %}
|
||||||
MailHog
|
MailHog
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
{{ cookiecutter.project_slug }}/media/
|
{{ cookiecutter.project_slug }}/media/
|
||||||
|
|
||||||
.pytest_cache/
|
.pytest_cache/
|
||||||
|
|
||||||
{% if cookiecutter.use_docker == 'y' %}
|
{%- if cookiecutter.use_docker == 'y' %}
|
||||||
.ipython/
|
.ipython/
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
||||||
|
{%- if cookiecutter.frontend_pipeline == 'Gulp' %}
|
||||||
|
project.css
|
||||||
|
project.min.css
|
||||||
|
vendors.js
|
||||||
|
*.min.js
|
||||||
|
*.min.js.map
|
||||||
|
{%- endif %}
|
||||||
|
{%- if cookiecutter.frontend_pipeline == 'Webpack' %}
|
||||||
|
{{ cookiecutter.project_slug }}/static/webpack_bundles/
|
||||||
|
webpack-stats.json
|
||||||
|
{%- endif %}
|
||||||
|
|
|
@ -13,7 +13,7 @@ variables:
|
||||||
|
|
||||||
flake8:
|
flake8:
|
||||||
stage: lint
|
stage: lint
|
||||||
image: python:3.7-alpine
|
image: python:3.10-alpine
|
||||||
before_script:
|
before_script:
|
||||||
- pip install -q flake8
|
- pip install -q flake8
|
||||||
script:
|
script:
|
||||||
|
@ -21,12 +21,12 @@ flake8:
|
||||||
|
|
||||||
pytest:
|
pytest:
|
||||||
stage: test
|
stage: test
|
||||||
image: python:3.7
|
|
||||||
{% if cookiecutter.use_docker == 'y' -%}
|
{% if cookiecutter.use_docker == 'y' -%}
|
||||||
|
image: docker/compose:1.29.2
|
||||||
tags:
|
tags:
|
||||||
- docker
|
- docker
|
||||||
services:
|
services:
|
||||||
- docker
|
- docker:dind
|
||||||
before_script:
|
before_script:
|
||||||
- docker-compose -f local.yml build
|
- docker-compose -f local.yml build
|
||||||
# Ensure celerybeat does not crash due to non-existent tables
|
# Ensure celerybeat does not crash due to non-existent tables
|
||||||
|
@ -34,11 +34,12 @@ pytest:
|
||||||
- docker-compose -f local.yml up -d
|
- docker-compose -f local.yml up -d
|
||||||
script:
|
script:
|
||||||
- docker-compose -f local.yml run django pytest
|
- docker-compose -f local.yml run django pytest
|
||||||
{%- else %}
|
{%- else -%}
|
||||||
|
image: python:3.10
|
||||||
tags:
|
tags:
|
||||||
- python
|
- python
|
||||||
services:
|
services:
|
||||||
- postgres:11
|
- postgres:{{ cookiecutter.postgresql_version }}
|
||||||
variables:
|
variables:
|
||||||
DATABASE_URL: pgsql://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres/$POSTGRES_DB
|
DATABASE_URL: pgsql://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres/$POSTGRES_DB
|
||||||
|
|
||||||
|
|
|
@ -0,0 +1,23 @@
|
||||||
|
<component name="ProjectRunConfigurationManager">
|
||||||
|
<configuration name="docker-compose up django" type="docker-deploy" factoryName="docker-compose.yml" server-name="Docker">
|
||||||
|
<deployment type="docker-compose.yml">
|
||||||
|
<settings>
|
||||||
|
<option name="envFilePath" value=""/>
|
||||||
|
<option name="services">
|
||||||
|
<list>
|
||||||
|
<option value="django"/>
|
||||||
|
{%- if cookiecutter.use_celery == 'y' %}
|
||||||
|
<option value="celeryworker"/>
|
||||||
|
<option value="celerybeat"/>
|
||||||
|
{%- endif %}
|
||||||
|
{%- if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] %}
|
||||||
|
<option value="node"/>
|
||||||
|
{%- endif %}
|
||||||
|
</list>
|
||||||
|
</option>
|
||||||
|
<option name="sourceFilePath" value="local.yml"/>
|
||||||
|
</settings>
|
||||||
|
</deployment>
|
||||||
|
<method v="2"/>
|
||||||
|
</configuration>
|
||||||
|
</component>
|
|
@ -0,0 +1,16 @@
|
||||||
|
<component name="ProjectRunConfigurationManager">
|
||||||
|
<configuration name="docker-compose up docs" type="docker-deploy" factoryName="docker-compose.yml" server-name="Docker">
|
||||||
|
<deployment type="docker-compose.yml">
|
||||||
|
<settings>
|
||||||
|
<option name="envFilePath" value=""/>
|
||||||
|
<option name="services">
|
||||||
|
<list>
|
||||||
|
<option value="docs"/>
|
||||||
|
</list>
|
||||||
|
</option>
|
||||||
|
<option name="sourceFilePath" value="local.yml"/>
|
||||||
|
</settings>
|
||||||
|
</deployment>
|
||||||
|
<method v="2"/>
|
||||||
|
</configuration>
|
||||||
|
</component>
|
|
@ -13,7 +13,7 @@
|
||||||
</facet>
|
</facet>
|
||||||
</component>
|
</component>
|
||||||
<component name="NewModuleRootManager">
|
<component name="NewModuleRootManager">
|
||||||
{% if cookiecutter.js_task_runner != 'None' %}
|
{% if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] %}
|
||||||
<content url="file://$MODULE_DIR$">
|
<content url="file://$MODULE_DIR$">
|
||||||
<excludeFolder url="file://$MODULE_DIR$/node_modules" />
|
<excludeFolder url="file://$MODULE_DIR$/node_modules" />
|
||||||
</content>
|
</content>
|
||||||
|
|
|
@ -1,24 +1,39 @@
|
||||||
exclude: 'docs|node_modules|migrations|.git|.tox'
|
exclude: "^docs/|/migrations/"
|
||||||
default_stages: [commit]
|
default_stages: [commit]
|
||||||
fail_fast: true
|
|
||||||
|
|
||||||
repos:
|
repos:
|
||||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||||
rev: master
|
rev: v4.4.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: trailing-whitespace
|
- id: trailing-whitespace
|
||||||
- id: end-of-file-fixer
|
- id: end-of-file-fixer
|
||||||
- id: check-yaml
|
- id: check-yaml
|
||||||
|
|
||||||
|
- repo: https://github.com/asottile/pyupgrade
|
||||||
|
rev: v3.3.1
|
||||||
|
hooks:
|
||||||
|
- id: pyupgrade
|
||||||
|
args: [--py310-plus]
|
||||||
|
|
||||||
- repo: https://github.com/psf/black
|
- repo: https://github.com/psf/black
|
||||||
rev: 19.10b0
|
rev: 23.1.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: black
|
- id: black
|
||||||
|
|
||||||
- repo: https://gitlab.com/pycqa/flake8
|
- repo: https://github.com/PyCQA/isort
|
||||||
rev: 3.8.1
|
rev: 5.12.0
|
||||||
|
hooks:
|
||||||
|
- id: isort
|
||||||
|
|
||||||
|
- repo: https://github.com/PyCQA/flake8
|
||||||
|
rev: 6.0.0
|
||||||
hooks:
|
hooks:
|
||||||
- id: flake8
|
- id: flake8
|
||||||
args: ['--config=setup.cfg']
|
args: ["--config=setup.cfg"]
|
||||||
additional_dependencies: [flake8-isort]
|
additional_dependencies: [flake8-isort]
|
||||||
|
|
||||||
|
# sets up .pre-commit-ci.yaml to ensure pre-commit dependencies stay up to date
|
||||||
|
ci:
|
||||||
|
autoupdate_schedule: weekly
|
||||||
|
skip: []
|
||||||
|
submodules: false
|
||||||
|
|
|
@ -1,6 +1,6 @@
|
||||||
[MASTER]
|
[MASTER]
|
||||||
load-plugins=pylint_django{% if cookiecutter.use_celery == "y" %}, pylint_celery {% endif %}
|
load-plugins=pylint_django{% if cookiecutter.use_celery == "y" %}, pylint_celery{% endif %}
|
||||||
|
django-settings-module=config.settings.local
|
||||||
[FORMAT]
|
[FORMAT]
|
||||||
max-line-length=120
|
max-line-length=120
|
||||||
|
|
||||||
|
|
12
{{cookiecutter.project_slug}}/.readthedocs.yml
Normal file
12
{{cookiecutter.project_slug}}/.readthedocs.yml
Normal file
|
@ -0,0 +1,12 @@
|
||||||
|
version: 2
|
||||||
|
|
||||||
|
sphinx:
|
||||||
|
configuration: docs/conf.py
|
||||||
|
|
||||||
|
build:
|
||||||
|
image: testing
|
||||||
|
|
||||||
|
python:
|
||||||
|
version: 3.10
|
||||||
|
install:
|
||||||
|
- requirements: requirements/local.txt
|
|
@ -1,8 +1,8 @@
|
||||||
dist: xenial
|
dist: focal
|
||||||
|
|
||||||
language: python
|
language: python
|
||||||
python:
|
python:
|
||||||
- "3.8"
|
- "3.10"
|
||||||
|
|
||||||
services:
|
services:
|
||||||
- {% if cookiecutter.use_docker == 'y' %}docker{% else %}postgresql{% endif %}
|
- {% if cookiecutter.use_docker == 'y' %}docker{% else %}postgresql{% endif %}
|
||||||
|
@ -37,7 +37,7 @@ jobs:
|
||||||
- sudo apt-get install -qq libsqlite3-dev libxml2 libxml2-dev libssl-dev libbz2-dev wget curl llvm
|
- sudo apt-get install -qq libsqlite3-dev libxml2 libxml2-dev libssl-dev libbz2-dev wget curl llvm
|
||||||
language: python
|
language: python
|
||||||
python:
|
python:
|
||||||
- "3.8"
|
- "3.10"
|
||||||
install:
|
install:
|
||||||
- pip install -r requirements/local.txt
|
- pip install -r requirements/local.txt
|
||||||
script:
|
script:
|
||||||
|
|
|
@ -7,7 +7,7 @@ Permission is hereby granted, free of charge, to any person obtaining a copy of
|
||||||
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
The above copyright notice and this permission notice shall be included in all copies or substantial portions of the Software.
|
||||||
|
|
||||||
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY, FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM, OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE SOFTWARE.
|
||||||
{% elif cookiecutter.open_source_license == 'BSD' %}
|
{%- elif cookiecutter.open_source_license == 'BSD' %}
|
||||||
Copyright (c) {% now 'utc', '%Y' %}, {{ cookiecutter.author_name }}
|
Copyright (c) {% now 'utc', '%Y' %}, {{ cookiecutter.author_name }}
|
||||||
All rights reserved.
|
All rights reserved.
|
||||||
|
|
||||||
|
@ -35,7 +35,7 @@ DATA, OR PROFITS; OR BUSINESS INTERRUPTION) HOWEVER CAUSED AND ON ANY THEORY
|
||||||
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
|
OF LIABILITY, WHETHER IN CONTRACT, STRICT LIABILITY, OR TORT (INCLUDING NEGLIGENCE
|
||||||
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
|
OR OTHERWISE) ARISING IN ANY WAY OUT OF THE USE OF THIS SOFTWARE, EVEN IF ADVISED
|
||||||
OF THE POSSIBILITY OF SUCH DAMAGE.
|
OF THE POSSIBILITY OF SUCH DAMAGE.
|
||||||
{% elif cookiecutter.open_source_license == 'GPLv3' %}
|
{%- elif cookiecutter.open_source_license == 'GPLv3' %}
|
||||||
Copyright (c) {% now 'utc', '%Y' %}, {{ cookiecutter.author_name }}
|
Copyright (c) {% now 'utc', '%Y' %}, {{ cookiecutter.author_name }}
|
||||||
|
|
||||||
This program is free software: you can redistribute it and/or modify
|
This program is free software: you can redistribute it and/or modify
|
||||||
|
@ -50,7 +50,7 @@ GNU General Public License for more details.
|
||||||
|
|
||||||
You should have received a copy of the GNU General Public License
|
You should have received a copy of the GNU General Public License
|
||||||
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
{% elif cookiecutter.open_source_license == 'Apache Software License 2.0' %}
|
{%- elif cookiecutter.open_source_license == 'Apache Software License 2.0' %}
|
||||||
|
|
||||||
Apache License
|
Apache License
|
||||||
Version 2.0, January 2004
|
Version 2.0, January 2004
|
||||||
|
@ -242,4 +242,4 @@ along with this program. If not, see <http://www.gnu.org/licenses/>.
|
||||||
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
WITHOUT WARRANTIES OR CONDITIONS OF ANY KIND, either express or implied.
|
||||||
See the License for the specific language governing permissions and
|
See the License for the specific language governing permissions and
|
||||||
limitations under the License.
|
limitations under the License.
|
||||||
{% endif %}
|
{%- endif %}
|
||||||
|
|
|
@ -1,10 +1,10 @@
|
||||||
release: python manage.py migrate
|
release: python manage.py migrate
|
||||||
{% if cookiecutter.use_async == "y" -%}
|
{%- if cookiecutter.use_async == "y" %}
|
||||||
web: gunicorn config.asgi:application -k uvicorn.workers.UvicornWorker
|
web: gunicorn config.asgi:application -k uvicorn.workers.UvicornWorker
|
||||||
{%- else %}
|
{%- else %}
|
||||||
web: gunicorn config.wsgi:application
|
web: gunicorn config.wsgi:application
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
{% if cookiecutter.use_celery == "y" -%}
|
{%- if cookiecutter.use_celery == "y" %}
|
||||||
worker: celery worker --app=config.celery_app --loglevel=info
|
worker: REMAP_SIGTERM=SIGQUIT celery -A config.celery_app worker --loglevel=info
|
||||||
beat: celery beat --app=config.celery_app --loglevel=info
|
beat: REMAP_SIGTERM=SIGQUIT celery -A config.celery_app beat --loglevel=info
|
||||||
{%- endif %}
|
{%- endif %}
|
||||||
|
|
141
{{cookiecutter.project_slug}}/README.md
Normal file
141
{{cookiecutter.project_slug}}/README.md
Normal file
|
@ -0,0 +1,141 @@
|
||||||
|
# {{cookiecutter.project_name}}
|
||||||
|
|
||||||
|
{{ cookiecutter.description }}
|
||||||
|
|
||||||
|
[](https://github.com/cookiecutter/cookiecutter-django/)
|
||||||
|
[](https://github.com/ambv/black)
|
||||||
|
|
||||||
|
{%- if cookiecutter.open_source_license != "Not open source" %}
|
||||||
|
|
||||||
|
License: {{cookiecutter.open_source_license}}
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
|
## Settings
|
||||||
|
|
||||||
|
Moved to [settings](http://cookiecutter-django.readthedocs.io/en/latest/settings.html).
|
||||||
|
|
||||||
|
## Basic Commands
|
||||||
|
|
||||||
|
### Setting Up Your Users
|
||||||
|
|
||||||
|
- To create a **normal user account**, just go to Sign Up and fill out the form. Once you submit it, you'll see a "Verify Your E-mail Address" page. Go to your console to see a simulated email verification message. Copy the link into your browser. Now the user's email should be verified and ready to go.
|
||||||
|
|
||||||
|
- To create a **superuser account**, use this command:
|
||||||
|
|
||||||
|
$ python manage.py createsuperuser
|
||||||
|
|
||||||
|
For convenience, you can keep your normal user logged in on Chrome and your superuser logged in on Firefox (or similar), so that you can see how the site behaves for both kinds of users.
|
||||||
|
|
||||||
|
### Type checks
|
||||||
|
|
||||||
|
Running type checks with mypy:
|
||||||
|
|
||||||
|
$ mypy {{cookiecutter.project_slug}}
|
||||||
|
|
||||||
|
### Test coverage
|
||||||
|
|
||||||
|
To run the tests, check your test coverage, and generate an HTML coverage report:
|
||||||
|
|
||||||
|
$ coverage run -m pytest
|
||||||
|
$ coverage html
|
||||||
|
$ open htmlcov/index.html
|
||||||
|
|
||||||
|
#### Running tests with pytest
|
||||||
|
|
||||||
|
$ pytest
|
||||||
|
|
||||||
|
### Live reloading and Sass CSS compilation
|
||||||
|
|
||||||
|
Moved to [Live reloading and SASS compilation](https://cookiecutter-django.readthedocs.io/en/latest/developing-locally.html#sass-compilation-live-reloading).
|
||||||
|
|
||||||
|
{%- if cookiecutter.use_celery == "y" %}
|
||||||
|
|
||||||
|
### Celery
|
||||||
|
|
||||||
|
This app comes with Celery.
|
||||||
|
|
||||||
|
To run a celery worker:
|
||||||
|
|
||||||
|
``` bash
|
||||||
|
cd {{cookiecutter.project_slug}}
|
||||||
|
celery -A config.celery_app worker -l info
|
||||||
|
```
|
||||||
|
|
||||||
|
Please note: For Celery's import magic to work, it is important *where* the celery commands are run. If you are in the same folder with *manage.py*, you should be right.
|
||||||
|
|
||||||
|
{%- endif %}
|
||||||
|
{%- if cookiecutter.use_mailhog == "y" %}
|
||||||
|
|
||||||
|
### Email Server
|
||||||
|
|
||||||
|
{%- if cookiecutter.use_docker == "y" %}
|
||||||
|
|
||||||
|
In development, it is often nice to be able to see emails that are being sent from your application. For that reason local SMTP server [MailHog](https://github.com/mailhog/MailHog) with a web interface is available as docker container.
|
||||||
|
|
||||||
|
Container mailhog will start automatically when you will run all docker containers.
|
||||||
|
Please check [cookiecutter-django Docker documentation](http://cookiecutter-django.readthedocs.io/en/latest/deployment-with-docker.html) for more details how to start all containers.
|
||||||
|
|
||||||
|
With MailHog running, to view messages that are sent by your application, open your browser and go to `http://127.0.0.1:8025`
|
||||||
|
{%- else %}
|
||||||
|
|
||||||
|
In development, it is often nice to be able to see emails that are being sent from your application. If you choose to use [MailHog](https://github.com/mailhog/MailHog) when generating the project a local SMTP server with a web interface will be available.
|
||||||
|
|
||||||
|
1. [Download the latest MailHog release](https://github.com/mailhog/MailHog/releases) for your OS.
|
||||||
|
|
||||||
|
2. Rename the build to `MailHog`.
|
||||||
|
|
||||||
|
3. Copy the file to the project root.
|
||||||
|
|
||||||
|
4. Make it executable:
|
||||||
|
|
||||||
|
$ chmod +x MailHog
|
||||||
|
|
||||||
|
5. Spin up another terminal window and start it there:
|
||||||
|
|
||||||
|
./MailHog
|
||||||
|
|
||||||
|
6. Check out <http://127.0.0.1:8025/> to see how it goes.
|
||||||
|
|
||||||
|
Now you have your own mail server running locally, ready to receive whatever you send it.
|
||||||
|
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
|
{%- endif %}
|
||||||
|
{%- if cookiecutter.use_sentry == "y" %}
|
||||||
|
|
||||||
|
### Sentry
|
||||||
|
|
||||||
|
Sentry is an error logging aggregator service. You can sign up for a free account at <https://sentry.io/signup/?code=cookiecutter> or download and host it yourself.
|
||||||
|
The system is set up with reasonable defaults, including 404 logging and integration with the WSGI application.
|
||||||
|
|
||||||
|
You must set the DSN url in production.
|
||||||
|
{%- endif %}
|
||||||
|
|
||||||
|
## Deployment
|
||||||
|
|
||||||
|
The following details how to deploy this application.
|
||||||
|
{%- if cookiecutter.use_heroku.lower() == "y" %}
|
||||||
|
|
||||||
|
### Heroku
|
||||||
|
|
||||||
|
See detailed [cookiecutter-django Heroku documentation](http://cookiecutter-django.readthedocs.io/en/latest/deployment-on-heroku.html).
|
||||||
|
|
||||||
|
{%- endif %}
|
||||||
|
{%- if cookiecutter.use_docker.lower() == "y" %}
|
||||||
|
|
||||||
|
### Docker
|
||||||
|
|
||||||
|
See detailed [cookiecutter-django Docker documentation](http://cookiecutter-django.readthedocs.io/en/latest/deployment-with-docker.html).
|
||||||
|
|
||||||
|
{%- endif %}
|
||||||
|
{%- if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] %}
|
||||||
|
|
||||||
|
### Custom Bootstrap Compilation
|
||||||
|
|
||||||
|
The generated CSS is set up with automatic Bootstrap recompilation with variables of your choice.
|
||||||
|
Bootstrap v5 is installed using npm and customised by tweaking your variables in `static/sass/custom_bootstrap_vars`.
|
||||||
|
|
||||||
|
You can find a list of available variables [in the bootstrap source](https://github.com/twbs/bootstrap/blob/v5.1.3/scss/_variables.scss), or get explanations on them in the [Bootstrap docs](https://getbootstrap.com/docs/5.1/customize/sass/).
|
||||||
|
|
||||||
|
Bootstrap's javascript as well as its dependencies are concatenated into a single file: `static/js/vendors.js`.
|
||||||
|
{%- endif %}
|
|
@ -1,173 +0,0 @@
|
||||||
{{cookiecutter.project_name}}
|
|
||||||
{{ '=' * cookiecutter.project_name|length }}
|
|
||||||
|
|
||||||
{{cookiecutter.description}}
|
|
||||||
|
|
||||||
.. image:: https://img.shields.io/badge/built%20with-Cookiecutter%20Django-ff69b4.svg
|
|
||||||
:target: https://github.com/pydanny/cookiecutter-django/
|
|
||||||
:alt: Built with Cookiecutter Django
|
|
||||||
.. image:: https://img.shields.io/badge/code%20style-black-000000.svg
|
|
||||||
:target: https://github.com/ambv/black
|
|
||||||
:alt: Black code style
|
|
||||||
{% if cookiecutter.open_source_license != "Not open source" %}
|
|
||||||
|
|
||||||
:License: {{cookiecutter.open_source_license}}
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
Settings
|
|
||||||
--------
|
|
||||||
|
|
||||||
Moved to settings_.
|
|
||||||
|
|
||||||
.. _settings: http://cookiecutter-django.readthedocs.io/en/latest/settings.html
|
|
||||||
|
|
||||||
Basic Commands
|
|
||||||
--------------
|
|
||||||
|
|
||||||
Setting Up Your Users
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
* To create a **normal user account**, just go to Sign Up and fill out the form. Once you submit it, you'll see a "Verify Your E-mail Address" page. Go to your console to see a simulated email verification message. Copy the link into your browser. Now the user's email should be verified and ready to go.
|
|
||||||
|
|
||||||
* To create an **superuser account**, use this command::
|
|
||||||
|
|
||||||
$ python manage.py createsuperuser
|
|
||||||
|
|
||||||
For convenience, you can keep your normal user logged in on Chrome and your superuser logged in on Firefox (or similar), so that you can see how the site behaves for both kinds of users.
|
|
||||||
|
|
||||||
Type checks
|
|
||||||
^^^^^^^^^^^
|
|
||||||
|
|
||||||
Running type checks with mypy:
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
$ mypy {{cookiecutter.project_slug}}
|
|
||||||
|
|
||||||
Test coverage
|
|
||||||
^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
To run the tests, check your test coverage, and generate an HTML coverage report::
|
|
||||||
|
|
||||||
$ coverage run -m pytest
|
|
||||||
$ coverage html
|
|
||||||
$ open htmlcov/index.html
|
|
||||||
|
|
||||||
Running tests with py.test
|
|
||||||
~~~~~~~~~~~~~~~~~~~~~~~~~~
|
|
||||||
|
|
||||||
::
|
|
||||||
|
|
||||||
$ pytest
|
|
||||||
|
|
||||||
Live reloading and Sass CSS compilation
|
|
||||||
^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^^
|
|
||||||
|
|
||||||
Moved to `Live reloading and SASS compilation`_.
|
|
||||||
|
|
||||||
.. _`Live reloading and SASS compilation`: http://cookiecutter-django.readthedocs.io/en/latest/live-reloading-and-sass-compilation.html
|
|
||||||
|
|
||||||
{% if cookiecutter.use_celery == "y" %}
|
|
||||||
|
|
||||||
Celery
|
|
||||||
^^^^^^
|
|
||||||
|
|
||||||
This app comes with Celery.
|
|
||||||
|
|
||||||
To run a celery worker:
|
|
||||||
|
|
||||||
.. code-block:: bash
|
|
||||||
|
|
||||||
cd {{cookiecutter.project_slug}}
|
|
||||||
celery -A config.celery_app worker -l info
|
|
||||||
|
|
||||||
Please note: For Celery's import magic to work, it is important *where* the celery commands are run. If you are in the same folder with *manage.py*, you should be right.
|
|
||||||
|
|
||||||
{% endif %}
|
|
||||||
{% if cookiecutter.use_mailhog == "y" %}
|
|
||||||
|
|
||||||
Email Server
|
|
||||||
^^^^^^^^^^^^
|
|
||||||
{% if cookiecutter.use_docker == 'y' %}
|
|
||||||
In development, it is often nice to be able to see emails that are being sent from your application. For that reason local SMTP server `MailHog`_ with a web interface is available as docker container.
|
|
||||||
|
|
||||||
Container mailhog will start automatically when you will run all docker containers.
|
|
||||||
Please check `cookiecutter-django Docker documentation`_ for more details how to start all containers.
|
|
||||||
|
|
||||||
With MailHog running, to view messages that are sent by your application, open your browser and go to ``http://127.0.0.1:8025``
|
|
||||||
{% else %}
|
|
||||||
In development, it is often nice to be able to see emails that are being sent from your application. If you choose to use `MailHog`_ when generating the project a local SMTP server with a web interface will be available.
|
|
||||||
|
|
||||||
#. `Download the latest MailHog release`_ for your OS.
|
|
||||||
|
|
||||||
#. Rename the build to ``MailHog``.
|
|
||||||
|
|
||||||
#. Copy the file to the project root.
|
|
||||||
|
|
||||||
#. Make it executable: ::
|
|
||||||
|
|
||||||
$ chmod +x MailHog
|
|
||||||
|
|
||||||
#. Spin up another terminal window and start it there: ::
|
|
||||||
|
|
||||||
./MailHog
|
|
||||||
|
|
||||||
#. Check out `<http://127.0.0.1:8025/>`_ to see how it goes.
|
|
||||||
|
|
||||||
Now you have your own mail server running locally, ready to receive whatever you send it.
|
|
||||||
|
|
||||||
.. _`Download the latest MailHog release`: https://github.com/mailhog/MailHog/releases
|
|
||||||
{% endif %}
|
|
||||||
.. _mailhog: https://github.com/mailhog/MailHog
|
|
||||||
{% endif %}
|
|
||||||
{% if cookiecutter.use_sentry == "y" %}
|
|
||||||
|
|
||||||
Sentry
|
|
||||||
^^^^^^
|
|
||||||
|
|
||||||
Sentry is an error logging aggregator service. You can sign up for a free account at https://sentry.io/signup/?code=cookiecutter or download and host it yourself.
|
|
||||||
The system is setup with reasonable defaults, including 404 logging and integration with the WSGI application.
|
|
||||||
|
|
||||||
You must set the DSN url in production.
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
Deployment
|
|
||||||
----------
|
|
||||||
|
|
||||||
The following details how to deploy this application.
|
|
||||||
{% if cookiecutter.use_heroku.lower() == "y" %}
|
|
||||||
|
|
||||||
Heroku
|
|
||||||
^^^^^^
|
|
||||||
|
|
||||||
See detailed `cookiecutter-django Heroku documentation`_.
|
|
||||||
|
|
||||||
.. _`cookiecutter-django Heroku documentation`: http://cookiecutter-django.readthedocs.io/en/latest/deployment-on-heroku.html
|
|
||||||
{% endif %}
|
|
||||||
{% if cookiecutter.use_docker.lower() == "y" %}
|
|
||||||
|
|
||||||
Docker
|
|
||||||
^^^^^^
|
|
||||||
|
|
||||||
See detailed `cookiecutter-django Docker documentation`_.
|
|
||||||
|
|
||||||
.. _`cookiecutter-django Docker documentation`: http://cookiecutter-django.readthedocs.io/en/latest/deployment-with-docker.html
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
{% if cookiecutter.custom_bootstrap_compilation == "y" %}
|
|
||||||
Custom Bootstrap Compilation
|
|
||||||
^^^^^^
|
|
||||||
|
|
||||||
The generated CSS is set up with automatic Bootstrap recompilation with variables of your choice.
|
|
||||||
Bootstrap v4 is installed using npm and customised by tweaking your variables in ``static/sass/custom_bootstrap_vars``.
|
|
||||||
|
|
||||||
You can find a list of available variables `in the bootstrap source`_, or get explanations on them in the `Bootstrap docs`_.
|
|
||||||
|
|
||||||
{% if cookiecutter.js_task_runner == 'Gulp' %}
|
|
||||||
Bootstrap's javascript as well as its dependencies is concatenated into a single file: ``static/js/vendors.js``.
|
|
||||||
{% endif %}
|
|
||||||
|
|
||||||
.. _in the bootstrap source: https://github.com/twbs/bootstrap/blob/v4-dev/scss/_variables.scss
|
|
||||||
.. _Bootstrap docs: https://getbootstrap.com/docs/4.1/getting-started/theming/
|
|
||||||
|
|
||||||
{% endif %}
|
|
22
{{cookiecutter.project_slug}}/bin/post_compile
Normal file
22
{{cookiecutter.project_slug}}/bin/post_compile
Normal file
|
@ -0,0 +1,22 @@
|
||||||
|
#!/usr/bin/env bash
|
||||||
|
|
||||||
|
compress_enabled() {
|
||||||
|
python << END
|
||||||
|
import sys
|
||||||
|
|
||||||
|
from environ import Env
|
||||||
|
|
||||||
|
env = Env(COMPRESS_ENABLED=(bool, True))
|
||||||
|
if env('COMPRESS_ENABLED'):
|
||||||
|
sys.exit(0)
|
||||||
|
else:
|
||||||
|
sys.exit(1)
|
||||||
|
|
||||||
|
END
|
||||||
|
}
|
||||||
|
|
||||||
|
if compress_enabled
|
||||||
|
then
|
||||||
|
python manage.py compress
|
||||||
|
fi
|
||||||
|
python manage.py collectstatic --noinput
|
|
@ -1,22 +1,57 @@
|
||||||
FROM python:3.8-slim-buster
|
ARG PYTHON_VERSION=3.10-slim-bullseye
|
||||||
|
|
||||||
|
# define an alias for the specfic python version used in this file.
|
||||||
|
FROM python:${PYTHON_VERSION} as python
|
||||||
|
|
||||||
|
# Python build stage
|
||||||
|
FROM python as python-build-stage
|
||||||
|
|
||||||
|
ARG BUILD_ENVIRONMENT=local
|
||||||
|
|
||||||
|
# Install apt packages
|
||||||
|
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||||
|
# dependencies for building Python packages
|
||||||
|
build-essential \
|
||||||
|
# psycopg2 dependencies
|
||||||
|
libpq-dev
|
||||||
|
|
||||||
|
# Requirements are installed here to ensure they will be cached.
|
||||||
|
COPY ./requirements .
|
||||||
|
|
||||||
|
# Create Python Dependency and Sub-Dependency Wheels.
|
||||||
|
RUN pip wheel --wheel-dir /usr/src/app/wheels \
|
||||||
|
-r ${BUILD_ENVIRONMENT}.txt
|
||||||
|
|
||||||
|
|
||||||
|
# Python 'run' stage
|
||||||
|
FROM python as python-run-stage
|
||||||
|
|
||||||
|
ARG BUILD_ENVIRONMENT=local
|
||||||
|
ARG APP_HOME=/app
|
||||||
|
|
||||||
ENV PYTHONUNBUFFERED 1
|
ENV PYTHONUNBUFFERED 1
|
||||||
ENV PYTHONDONTWRITEBYTECODE 1
|
ENV PYTHONDONTWRITEBYTECODE 1
|
||||||
|
ENV BUILD_ENV ${BUILD_ENVIRONMENT}
|
||||||
|
|
||||||
RUN apt-get update \
|
WORKDIR ${APP_HOME}
|
||||||
# dependencies for building Python packages
|
|
||||||
&& apt-get install -y build-essential \
|
# Install required system dependencies
|
||||||
|
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||||
# psycopg2 dependencies
|
# psycopg2 dependencies
|
||||||
&& apt-get install -y libpq-dev \
|
libpq-dev \
|
||||||
# Translations dependencies
|
# Translations dependencies
|
||||||
&& apt-get install -y gettext \
|
gettext \
|
||||||
# cleaning up unused files
|
# cleaning up unused files
|
||||||
&& apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
|
&& apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
|
||||||
&& rm -rf /var/lib/apt/lists/*
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
# Requirements are installed here to ensure they will be cached.
|
# All absolute dir copies ignore workdir instruction. All relative dir copies are wrt to the workdir instruction
|
||||||
COPY ./requirements /requirements
|
# copy python dependency wheels from python-build-stage
|
||||||
RUN pip install -r /requirements/local.txt
|
COPY --from=python-build-stage /usr/src/app/wheels /wheels/
|
||||||
|
|
||||||
|
# use wheels to install python dependencies
|
||||||
|
RUN pip install --no-cache-dir --no-index --find-links=/wheels/ /wheels/* \
|
||||||
|
&& rm -rf /wheels/
|
||||||
|
|
||||||
COPY ./compose/production/django/entrypoint /entrypoint
|
COPY ./compose/production/django/entrypoint /entrypoint
|
||||||
RUN sed -i 's/\r$//g' /entrypoint
|
RUN sed -i 's/\r$//g' /entrypoint
|
||||||
|
@ -25,6 +60,7 @@ RUN chmod +x /entrypoint
|
||||||
COPY ./compose/local/django/start /start
|
COPY ./compose/local/django/start /start
|
||||||
RUN sed -i 's/\r$//g' /start
|
RUN sed -i 's/\r$//g' /start
|
||||||
RUN chmod +x /start
|
RUN chmod +x /start
|
||||||
|
|
||||||
{% if cookiecutter.use_celery == "y" %}
|
{% if cookiecutter.use_celery == "y" %}
|
||||||
COPY ./compose/local/django/celery/worker/start /start-celeryworker
|
COPY ./compose/local/django/celery/worker/start /start-celeryworker
|
||||||
RUN sed -i 's/\r$//g' /start-celeryworker
|
RUN sed -i 's/\r$//g' /start-celeryworker
|
||||||
|
@ -38,6 +74,8 @@ COPY ./compose/local/django/celery/flower/start /start-flower
|
||||||
RUN sed -i 's/\r$//g' /start-flower
|
RUN sed -i 's/\r$//g' /start-flower
|
||||||
RUN chmod +x /start-flower
|
RUN chmod +x /start-flower
|
||||||
{% endif %}
|
{% endif %}
|
||||||
WORKDIR /app
|
|
||||||
|
# copy application code to WORKDIR
|
||||||
|
COPY . ${APP_HOME}
|
||||||
|
|
||||||
ENTRYPOINT ["/entrypoint"]
|
ENTRYPOINT ["/entrypoint"]
|
||||||
|
|
|
@ -5,4 +5,4 @@ set -o nounset
|
||||||
|
|
||||||
|
|
||||||
rm -f './celerybeat.pid'
|
rm -f './celerybeat.pid'
|
||||||
celery -A config.celery_app beat -l INFO
|
exec watchfiles celery.__main__.main --args '-A config.celery_app beat -l INFO'
|
||||||
|
|
|
@ -3,8 +3,6 @@
|
||||||
set -o errexit
|
set -o errexit
|
||||||
set -o nounset
|
set -o nounset
|
||||||
|
|
||||||
|
exec watchfiles celery.__main__.main \
|
||||||
celery flower \
|
--args \
|
||||||
--app=config.celery_app \
|
"-A config.celery_app -b \"${CELERY_BROKER_URL}\" flower --basic_auth=\"${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}\""
|
||||||
--broker="${CELERY_BROKER_URL}" \
|
|
||||||
--basic_auth="${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}"
|
|
||||||
|
|
|
@ -4,4 +4,4 @@ set -o errexit
|
||||||
set -o nounset
|
set -o nounset
|
||||||
|
|
||||||
|
|
||||||
celery -A config.celery_app worker -l INFO
|
exec watchfiles celery.__main__.main --args '-A config.celery_app worker -l INFO'
|
||||||
|
|
|
@ -7,7 +7,7 @@ set -o nounset
|
||||||
|
|
||||||
python manage.py migrate
|
python manage.py migrate
|
||||||
{%- if cookiecutter.use_async == 'y' %}
|
{%- if cookiecutter.use_async == 'y' %}
|
||||||
uvicorn config.asgi:application --host 0.0.0.0 --reload
|
exec uvicorn config.asgi:application --host 0.0.0.0 --reload --reload-include '*.html'
|
||||||
{%- else %}
|
{%- else %}
|
||||||
python manage.py runserver_plus 0.0.0.0:8000
|
exec python manage.py runserver_plus 0.0.0.0:8000
|
||||||
{% endif %}
|
{%- endif %}
|
||||||
|
|
64
{{cookiecutter.project_slug}}/compose/local/docs/Dockerfile
Normal file
64
{{cookiecutter.project_slug}}/compose/local/docs/Dockerfile
Normal file
|
@ -0,0 +1,64 @@
|
||||||
|
ARG PYTHON_VERSION=3.10-slim-bullseye
|
||||||
|
|
||||||
|
# define an alias for the specfic python version used in this file.
|
||||||
|
FROM python:${PYTHON_VERSION} as python
|
||||||
|
|
||||||
|
|
||||||
|
# Python build stage
|
||||||
|
FROM python as python-build-stage
|
||||||
|
|
||||||
|
ENV PYTHONDONTWRITEBYTECODE 1
|
||||||
|
|
||||||
|
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||||
|
# dependencies for building Python packages
|
||||||
|
build-essential \
|
||||||
|
# psycopg2 dependencies
|
||||||
|
libpq-dev \
|
||||||
|
# cleaning up unused files
|
||||||
|
&& apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# Requirements are installed here to ensure they will be cached.
|
||||||
|
COPY ./requirements /requirements
|
||||||
|
|
||||||
|
# create python dependency wheels
|
||||||
|
RUN pip wheel --no-cache-dir --wheel-dir /usr/src/app/wheels \
|
||||||
|
-r /requirements/local.txt -r /requirements/production.txt \
|
||||||
|
&& rm -rf /requirements
|
||||||
|
|
||||||
|
|
||||||
|
# Python 'run' stage
|
||||||
|
FROM python as python-run-stage
|
||||||
|
|
||||||
|
ARG BUILD_ENVIRONMENT
|
||||||
|
ENV PYTHONUNBUFFERED 1
|
||||||
|
ENV PYTHONDONTWRITEBYTECODE 1
|
||||||
|
|
||||||
|
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||||
|
# To run the Makefile
|
||||||
|
make \
|
||||||
|
# psycopg2 dependencies
|
||||||
|
libpq-dev \
|
||||||
|
# Translations dependencies
|
||||||
|
gettext \
|
||||||
|
# Uncomment below lines to enable Sphinx output to latex and pdf
|
||||||
|
# texlive-latex-recommended \
|
||||||
|
# texlive-fonts-recommended \
|
||||||
|
# texlive-latex-extra \
|
||||||
|
# latexmk \
|
||||||
|
# cleaning up unused files
|
||||||
|
&& apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
|
||||||
|
&& rm -rf /var/lib/apt/lists/*
|
||||||
|
|
||||||
|
# copy python dependency wheels from python-build-stage
|
||||||
|
COPY --from=python-build-stage /usr/src/app/wheels /wheels
|
||||||
|
|
||||||
|
# use wheels to install python dependencies
|
||||||
|
RUN pip install --no-cache /wheels/* \
|
||||||
|
&& rm -rf /wheels
|
||||||
|
|
||||||
|
COPY ./compose/local/docs/start /start-docs
|
||||||
|
RUN sed -i 's/\r$//g' /start-docs
|
||||||
|
RUN chmod +x /start-docs
|
||||||
|
|
||||||
|
WORKDIR /docs
|
7
{{cookiecutter.project_slug}}/compose/local/docs/start
Normal file
7
{{cookiecutter.project_slug}}/compose/local/docs/start
Normal file
|
@ -0,0 +1,7 @@
|
||||||
|
#!/bin/bash
|
||||||
|
|
||||||
|
set -o errexit
|
||||||
|
set -o pipefail
|
||||||
|
set -o nounset
|
||||||
|
|
||||||
|
exec make livehtml
|
|
@ -1,4 +1,4 @@
|
||||||
FROM node:10-stretch-slim
|
FROM node:16-bullseye-slim
|
||||||
|
|
||||||
WORKDIR /app
|
WORKDIR /app
|
||||||
|
|
||||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user