diff --git a/.github/FUNDING.yml b/.github/FUNDING.yml new file mode 100644 index 00000000..9f4c97f3 --- /dev/null +++ b/.github/FUNDING.yml @@ -0,0 +1,12 @@ +# These are supported funding model platforms + +github: # Replace with up to 4 GitHub Sponsors-enabled usernames e.g., [user1, user2] +patreon: danielroygreenfeld +open_collective: # Replace with a single Open Collective username +ko_fi: # Replace with a single Ko-fi username +tidelift: # Replace with a single Tidelift platform-name/package-name e.g., npm/babel +community_bridge: # Replace with a single Community Bridge project-name e.g., cloud-foundry +liberapay: # Replace with a single Liberapay username +issuehunt: # Replace with a single IssueHunt username +otechie: # Replace with a single Otechie username +custom: ['https://www.patreon.com/browniebroke'] diff --git a/.pyup.yml b/.pyup.yml new file mode 100644 index 00000000..4978524e --- /dev/null +++ b/.pyup.yml @@ -0,0 +1,17 @@ +# configure updates globally +# default: all +# allowed: all, insecure, False +update: all + +# configure dependency pinning globally +# default: True +# allowed: True, False +pin: True + +# Specify requirement files by hand, pyup seems to struggle to +# find the ones in the project_slug folder +requirements: + - "requirements.txt" + - "{{cookiecutter.project_slug}}/requirements/base.txt" + - "{{cookiecutter.project_slug}}/requirements/local.txt" + - "{{cookiecutter.project_slug}}/requirements/production.txt" diff --git a/.travis.yml b/.travis.yml index a46726d6..925d82e7 100644 --- a/.travis.yml +++ b/.travis.yml @@ -1,22 +1,37 @@ -sudo: required +dist: xenial services: - docker language: python -python: 3.6 - -env: - - TOX_ENV=py36 +python: 3.7 before_install: - docker-compose -v - docker -v -script: - - tox -e $TOX_ENV - - sh tests/test_docker.sh +matrix: + include: + - name: Test results + script: tox -e py37 + - name: Run flake8 on result + script: tox -e flake8 + - name: Run black on result + script: tox -e black + - name: Black template + script: tox -e black-template + - name: Basic Docker + script: sh tests/test_docker.sh + - name: Docker with Celery + script: sh tests/test_docker.sh use_celery=y + - name: Bare metal + script: sh tests/test_bare.sh use_celery=y use_compressor=y + services: + - postgresql + - redis-server + env: + - CELERY_BROKER_URL=redis://localhost:6379/0 install: - pip install tox diff --git a/CHANGELOG.md b/CHANGELOG.md index 304732b9..ae8f7efe 100644 --- a/CHANGELOG.md +++ b/CHANGELOG.md @@ -1,6 +1,258 @@ # Change Log All enhancements and patches to Cookiecutter Django will be documented in this file. -This project adheres to [Semantic Versioning](http://semver.org/). 
+ +## [2020-01-23] +### Changed +- Fix UserFactory to set the password if provided (@BoPeng) +- Update documentation files with latest Sphinx (@howiezhao) + +## [2020-01-12] +### Changed +- Fix mypy setup and added django-stubs (@danifus) +- Add Gitlab CI as option (@ikhomutov) + +## [2020-01-11] +### Changed +- Speed up & reduce size for production Django image (@maxp) +- Bumped runtime version for Heroku (@Isaac12x) +- Added Debian 10 (Buster) OS dependencies (@ddiazpinto) +- Update Traefik to v2 (@blaxpy) +- Switched Docker images from Alpine based to Debian based (@trungdong) + +## [2019-10-06] +### Changed +- Default Python version is now 3.7 (@nicolas471) + +## [2019-10-04] +### Fixed +- Fix static files handling on GCP (@caioariede) + +## [2019-10-03] +### Fixed +- Fix incompatible combination between Whitenoise and no cloud provider (@caioariede) + +## [2019-07-09] +### Fixed +- Always use test settings in pytest (@danihodovic) +- Remove gunicorn from `INSTALLED_APPS` (@danihodovic) +- Remove `EMAIL_HOST` and `EMAIL_PORT` with locmem backend (@danihodovic) + +### Added +- Add `EMAIL_TIMEOUT` (@danihodovic) + +## [2019-06-22] +### Fixed +- Remove redundant template debug setting (@danihodovic) + +## [2019-06-19] +### Fixed +- Fix removal carriage returns in docker scripts (@timclaessens) + +## [2019-06-15] +### Fixed +- Issue with Pycharm setup for running things in Docker compose (@foarsitter) + +## [2019-06-06] +### Changed +- Update generated Travis config (@browniebroke) + +## [2019-06-03] +### Added +- Installed `django-celery-beat` to keep scheduled tasks in DB (@keyvanm) + +## [2019-05-28] +### Changed +- Use GCP acronym rather than inconsistent GCE/GCS (@tanoabeleyra) + +## [2019-05-27] +### Changed +- Made cloud provider optional (@tanoabeleyra) +- Updated to Django 2.2.1 (@browniebroke) + +### Fixed +- Celery worker-related setting names (@browniebroke) + +## [2019-05-18] +### Removed +- Remove the user list view (@browniebroke) + +### Fixed +- Static storage default ACL (@browniebroke) + +## [2019-05-17] +### Fixed +- Added `LocaleMiddleware` to the list of middlewares (@tanoabeleyra) +- Added `LOCALE_PATH` to settings (@tanoabeleyra) + +## [2019-05-16] +### Changed +- Users app to have a translated verbose name (@tanoabeleyra) +- Logging configuration for local (@browniebroke) + +## [2019-05-08] +### Changed +- Upgraded to Django 2.1 (@browniebroke) + +## [2019-04-07] +### Added +- Support for Google Cloud Storage (@ahhda) + +## [2019-04-03] +### Added +- Command to backup Db to AWS S3 (@foarsitter) + +## [2019-03-25] +### Added +- Node image to run Gulp with Docker (@browniebroke) + +## [2019-03-19] +### Changed +- Replaced Caddy with Traefik (@demestav) + +## [2019-03-11] +### Changed +- Sentry integration from Raven to Sentry-SDK (@gfabricio) +- Made Redis config conditional on Celery locally (@demestav) + +## [2019-03-11] +### Added +- Automatic migrations on Heroku (@yunti) + +## [2019-03-06] +### Fixed +- Missing script tag in Travis config (@btknu) + +## [2019-03-02] +### Changed +- Celery eager setting in local setting with Docker (@keithjeb) + +## [2019-03-01] +### Updated +- All NPM dependencies (@takkaria) + +## [2018-11-13] +### Changed +- Security settings in Dev (@carlmjohnson) + +## [2018-11-20] +### Fixed +- Passing the CSRF header from the reverse proxy to Django server for DRF (@hpbruna) + +## [2018-11-12] +### Fixed +- Initialisation of Celery app (@glasslion) + +## [2018-10-24] +### Fixed +- Persisting of iPython history between sessions 
(@davitovmasyan) + +### Added +- Postgres 10.5 option (@jleclanche) + +## [2018-09-18] +### Added +- Included `mypy` in dependencies and run it in tests (@apirobot) + +## [2018-09-18] +### Fixed +- Avoid `$` in environment variables to workaround a bug from django-environ (@browniebroke) + +## [2018-09-16] +### Fixed +- Bug in ordering of Middleware for production config (@ChrisPappalardo) + +## [2018-09-12] +### Fixed +- URLs for Static and Media for S3 buckets in regions other than N. Virginia (@umrashrf) + +## [2018-09-09] +### Changed +- Name of static and media storage classes (@sfdye) + +## [2018-09-01] +### Changed +- Make static and media storage fully-fledged classes (@erfaan) + +## [2018-08-28] +### Fixed +- Running tests in docker test script (@apirobot) + +## [2018-07-23] +### Changed +- Test commands to use pytest (@jcass77) + +### Removed +- Some hacks leftovers from Bootstrap v4 beta in `project.js` (@hendrikschneider) + +## [2018-07-12] +### Changed +- Upgraded to Bootstrap 4.1.1 (@mostaszewski) + +## [2018-06-25] +### Added +- Flower integration with Docker (@webyneter) + +## [2018-06-25] +### Changed +- Rewrite user app test to use a pytest style (@webyneter) + +## [2018-06-21] +### Added +- Extend & update Celery config (@webyneter & @apirobot) + +## [2018-05-25] +### Fixed +- Build issues due to incompatibility between libressl & openssl (@SassanoM) + +## [2018-05-21] +### Changed +- Updated Caddy to 0.11 and pin its version (@webyneter) + +## [2018-05-14] +### Changed +- Replace `awesome-slugify` by `python-slugify` (@hongquan) +- Migrate to Django 2.0+ URL style (@saschalalala) + +## [2018-05-05] +### Fixed +- Postgres backup & restore commands (@webyneter) + +## [2018-04-10] +### Changed +- Simplify configuration (@danidee10) + +## [2018-04-08] +### Added +- Adopt Black code style (@pydanny) + +## [2018-03-27] +### Fixed +- Simplified extra Celery config generated when opted out (@webyneter) + +## [2018-03-21] +### Removed +- Remove Opbeat support (@sfdye) + +## [2018-03-16] +### Fixed +- Install `psycopg2-binary` when using Docker locally (@browniebroke) + +## [2018-03-14] +### Fixed +- Fixed and improved Postgres backup & restore scripts (@webyneter) + +## [2018-03-10] +### Changed +- Simplify Mailgun setting (@browniebroke) + +## [2018-03-06] +### Changed +- Convert string formatting to f-strings (@sfdye) + +## [2018-03-01] +### Changed +- Celery to use JSON serialization by default (@adammsteele) +- Use Docker version from Travis to run tests (@browniebroke) ## [2018-02-16] ### Changed diff --git a/CONTRIBUTING.rst b/CONTRIBUTING.rst index cfe16740..cb59ae5f 100644 --- a/CONTRIBUTING.rst +++ b/CONTRIBUTING.rst @@ -39,9 +39,9 @@ To run all tests using various versions of python in virtualenvs defined in tox. It is possible to test with a specific version of python. To do this, the command is:: - $ tox -e py36 + $ tox -e py37 -This will run py.test with the python3.6 interpreter, for example. +This will run py.test with the python3.7 interpreter, for example. To run a particular test with tox for against your current Python version:: diff --git a/CONTRIBUTORS.rst b/CONTRIBUTORS.rst index dc41463f..2554d2cf 100644 --- a/CONTRIBUTORS.rst +++ b/CONTRIBUTORS.rst @@ -7,19 +7,19 @@ Core Developers These contributors have commit flags for the repository, and are able to accept and merge pull requests. 
-=========================== ================ =========== -Name Github Twitter -=========================== ================ =========== -Daniel Roy Greenfeld `@pydanny`_ @pydanny -Audrey Roy Greenfeld* `@audreyr`_ @audreyr -Fábio C. Barrionuevo da Luz `@luzfcb`_ @luzfcb -Saurabh Kumar `@theskumar`_ @_theskumar +=========================== ================= =========== +Name Github Twitter +=========================== ================= =========== +Daniel Roy Greenfeld `@pydanny`_ @pydanny +Audrey Roy Greenfeld* `@audreyr`_ @audreyr +Fábio C. Barrionuevo da Luz `@luzfcb`_ @luzfcb +Saurabh Kumar `@theskumar`_ @_theskumar Jannis Gebauer `@jayfk`_ -Burhan Khalid `@burhan`_ @burhan -Nikita Shupeyko `@webyneter`_ @webyneter -Bruno Alla               `@browniebroke`_ @_BrunoAlla -Wan Liuyang `@sfdye`_ @sfdye -=========================== ================ =========== +Burhan Khalid `@burhan`_ @burhan +Nikita Shupeyko `@webyneter`_ @webyneter +Bruno Alla               `@browniebroke`_ @_BrunoAlla +Wan Liuyang `@sfdye`_ @sfdye +=========================== ================= =========== *Audrey is also the creator of Cookiecutter. Audrey and Daniel are on the Cookiecutter core team.* @@ -42,11 +42,12 @@ Listed in alphabetical order. Name Github Twitter ========================== ============================ ============== 18 `@dezoito`_ + 2O4 `@2O4`_ a7p `@a7p`_ Aaron Eikenberry `@aeikenberry`_ Adam Bogdał `@bogdal`_ Adam Dobrawy `@ad-m`_ - Adam Steele `@adammsteele` + Adam Steele `@adammsteele`_ Agam Dua Alberto Sanchez `@alb3rto`_ Alex Tsai `@caffodian`_ @@ -64,15 +65,18 @@ Listed in alphabetical order. Areski Belaid `@areski`_ Ashley Camba Barclay Gauld `@yunti`_ - Ben Warren `@bwarren2` + Bartek `@btknu`_ Ben Lopatin + Ben Warren `@bwarren2`_ Benjamin Abel Bert de Miranda `@bertdemiranda`_ Bo Lopker `@blopker`_ + Bo Peng `@BoPeng`_ Bouke Haarsma Brent Payne `@brentpayne`_ @brentpayne - Bartek `@btknu` + Bruce Olivier `@bolivierjr`_ Burhan Khalid            `@burhan`_                   @burhan + Caio Ariede `@caioariede`_ @caioariede Carl Johnson `@carlmjohnson`_ @carlmjohnson Catherine Devlin `@catherinedevlin`_ Cédric Gaspoz `@cgaspoz`_ @@ -84,33 +88,49 @@ Listed in alphabetical order. Christopher Clarke `@chrisdev`_ Cole Mackenzie `@cmackenzie1`_ Collederas `@Collederas`_ + Craig Margieson `@cmargieson`_ Cristian Vargas `@cdvv7788`_ Cullen Rhodes `@c-rhodes`_ + Curtis St Pierre `@curtisstpierre`_ @cstpierre1388 Dan Shultz `@shultz`_ + Dani Hodovic `@danihodovic`_ Daniel Hepper `@dhepper`_ @danielhepper + Daniel Hillier `@danifus`_ Daniele Tricoli `@eriol`_ David Díaz `@ddiazpinto`_ @DavidDiazPinto Davit Tovmasyan `@davitovmasyan`_ Davur Clementsen `@dsclementsen`_ @davur Delio Castillo `@jangeador`_ @jangeador + Demetris Stavrou `@demestav`_ + Denis Bobrov `@delneg`_ Denis Orehovsky `@apirobot`_ - Dónal Adams `@epileptic-fish`_ + Denis Savran `@blaxpy`_ Diane Chen `@purplediane`_ @purplediane88 + Dónal Adams `@epileptic-fish`_ Dong Huynh `@trungdong`_ Emanuel Calso `@bloodpet`_ @bloodpet Eraldo Energy `@eraldo`_ Eric Groom `@ericgroom`_ Eyad Al Sibai `@eyadsibai`_ Felipe Arruda `@arruda`_ + Florian Idelberger `@step21`_ @windrush Garry Cairns `@garry-cairns`_ Garry Polley `@garrypolley`_ + Gilbishkosma `@Gilbishkosma`_ Hamish Durkin `@durkode`_ + Hana Quadara `@hanaquadara`_ + Harry Moreno `@morenoh149`_ @morenoh149 Harry Percival `@hjwp`_ Hendrik Schneider `@hendrikschneider`_ Henrique G. G. 
Pereira `@ikkebr`_ + Howie Zhao `@howiezhao`_ Ian Lee `@IanLee1521`_ Irfan Ahmad `@erfaan`_ @erfaan + Isaac12x `@Isaac12x`_ + Ivan Khomutov `@ikhomutov`_ Jan Van Bruggen `@jvanbrug`_ + Jelmer Draaijer `@foarsitter`_ + Jerome Caisip `@jeromecaisip`_ Jens Nilsson `@phiberjenz`_ Jerome Leclanche `@jleclanche`_ @Adys Jimmy Gitonga `@afrowave`_ @afrowave @@ -120,13 +140,16 @@ Listed in alphabetical order. Kaido Kert `@kaidokert`_ kappataumu `@kappataumu`_ @kappataumu Kaveh `@ka7eh`_ + Keith Bailey `@keithjeb`_ + Keith Webber `@townie`_ Kevin A. Stone Kevin Ndung'u `@kevgathuku`_ - Keith Webber `@townie`_ + Keyvan Mosharraf `@keyvanm`_ Krzysztof Szumny `@noisy`_ Krzysztof Żuraw `@krzysztofzuraw`_ - Leonardo Jimenez `@xpostudio4`_ + Leo won `@leollon`_ Leo Zhou `@glasslion`_ + Leonardo Jimenez `@xpostudio4`_ Lin Xianyi `@iynaix`_ Luis Nell `@originell`_ Lukas Klein @@ -137,6 +160,7 @@ Listed in alphabetical order. Mateusz Ostaszewski `@mostaszewski`_ Mathijs Hoogland `@MathijsHoogland`_ Matt Braymer-Hayes `@mattayes`_ @mattayes + Matt Knapper `@mknapper1`_ Matt Linares Matt Menzenski `@menzenski`_ Matt Warren `@mfwarren`_ @@ -144,66 +168,86 @@ Listed in alphabetical order. Meghan Heintz `@dot2dotseurat`_ Mesut Yılmaz `@myilmaz`_ Michael Gecht `@mimischi`_ @_mischi + Michael Samoylov `@msamoylov`_ + Min ho Kim `@minho42`_ mozillazg `@mozillazg`_ + Nico Stefani `@nicolas471`_ @moby_dick91 + Oleg Russkin `@rolep`_ Pablo `@oubiga`_ Parbhat Puri `@parbhat`_ Peter Bittner `@bittner`_ Peter Coles `@mrcoles`_ + Philipp Matthies `@canonnervio`_ Pierre Chiquet `@pchiquet`_ - Raphael Pierzina `@hackebrot`_ Raony Guimarães Corrêa `@raonyguimaraes`_ + Raphael Pierzina `@hackebrot`_ Reggie Riser `@reggieriser`_ René Muhl `@rm--`_ Roman Afanaskin `@siauPatrick`_ Roman Osipenko `@romanosipenko`_ Russell Davies - Sascha `@saschalalala` @saschalalala Sam Collins `@MightySCollins`_ + Sascha `@saschalalala`_ @saschalalala Shupeyko Nikita `@webyneter`_ Sławek Ehlert `@slafs`_ Srinivas Nyayapati `@shireenrao`_ stepmr `@stepmr`_ Steve Steiner `@ssteinerX`_ Sule Marshall `@suledev`_ + Tano Abeleyra `@tanoabeleyra`_ Taylor Baldwin Théo Segonds `@show0k`_ + Tim Claessens `@timclaessens`_ Tim Freund `@timfreund`_ Tom Atkins `@knitatoms`_ Tom Offermann Travis McNeill `@Travistock`_ @tavistock_esq Tubo Shi `@Tubo`_ Umair Ashraf `@umrashrf`_ @fabumair + Vadim Iskuchekov `@Egregors`_ @egregors Vitaly Babiy Vivian Guillen `@viviangb`_ + Vlad Doster `@vladdoster`_ Will Farley `@goldhand`_ @g01dhand William Archinal `@archinal`_ + Xaver Y.R. Chen `@yrchen`_ @yrchen Yaroslav Halchenko - Denis Bobrov `@delneg`_ - Philipp Matthies `@canonnervio`_ - Vadim Iskuchekov `@Egregors`_ @egregors - Keith Bailey `@keithjeb`_ + Yuchen Xie `@mapx`_ ========================== ============================ ============== .. _@a7p: https://github.com/a7p +.. _@2O4: https://github.com/2O4 .. _@ad-m: https://github.com/ad-m .. _@adammsteele: https://github.com/adammsteele .. _@aeikenberry: https://github.com/aeikenberry +.. _@afrowave: https://github.com/afrowave +.. _@ahhda: https://github.com/ahhda .. _@alb3rto: https://github.com/alb3rto .. _@ameistad: https://github.com/ameistad .. _@amjith: https://github.com/amjith .. _@andor-pierdelacabeza: https://github.com/andor-pierdelacabeza +.. _@andresgz: https://github.com/andresgz .. _@antoniablair: https://github.com/antoniablair .. _@apirobot: https://github.com/apirobot .. _@archinal: https://github.com/archinal .. _@areski: https://github.com/areski .. _@arruda: https://github.com/arruda +.. 
_@bertdemiranda: https://github.com/bertdemiranda .. _@bittner: https://github.com/bittner +.. _@blaxpy: https://github.com/blaxpy .. _@bloodpet: https://github.com/bloodpet .. _@blopker: https://github.com/blopker .. _@bogdal: https://github.com/bogdal +.. _@bolivierjr: https://github.com/bolivierjr +.. _@BoPeng: https://github.com/BoPeng +.. _@brentpayne: https://github.com/brentpayne +.. _@btknu: https://github.com/btknu .. _@burhan: https://github.com/burhan +.. _@bwarren2: https://github.com/bwarren2 .. _@c-rhodes: https://github.com/c-rhodes .. _@caffodian: https://github.com/caffodian +.. _@canonnervio: https://github.com/canonnervio +.. _@caioariede: https://github.com/caioariede .. _@carlmjohnson: https://github.com/carlmjohnson .. _@catherinedevlin: https://github.com/catherinedevlin .. _@ccurvey: https://github.com/ccurvey @@ -213,94 +257,119 @@ Listed in alphabetical order. .. _@ChrisPappalardo: https://github.com/ChrisPappalardo .. _@chuckus: https://github.com/chuckus .. _@cmackenzie1: https://github.com/cmackenzie1 +.. _@cmargieson: https://github.com/cmargieson .. _@Collederas: https://github.com/Collederas +.. _@curtisstpierre: https://github.com/curtisstpierre +.. _@dadokkio: https://github.com/dadokkio +.. _@danihodovic: https://github.com/danihodovic +.. _@danifus: https://github.com/danifus .. _@davitovmasyan: https://github.com/davitovmasyan .. _@ddiazpinto: https://github.com/ddiazpinto +.. _@delneg: https://github.com/delneg +.. _@demestav: https://github.com/demestav .. _@dezoito: https://github.com/dezoito .. _@dhepper: https://github.com/dhepper .. _@dot2dotseurat: https://github.com/dot2dotseurat .. _@dsclementsen: https://github.com/dsclementsen .. _@durkode: https://github.com/durkode +.. _@Egregors: https://github.com/Egregors .. _@epileptic-fish: https://gihub.com/epileptic-fish .. _@eraldo: https://github.com/eraldo .. _@erfaan: https://github.com/erfaan +.. _@ericgroom: https://github.com/ericgroom .. _@eriol: https://github.com/eriol .. _@eyadsibai: https://github.com/eyadsibai .. _@flyudvik: https://github.com/flyudvik +.. _@foarsitter: https://github.com/foarsitter .. _@garry-cairns: https://github.com/garry-cairns .. _@garrypolley: https://github.com/garrypolley -.. _@goldhand: https://github.com/goldhand +.. _@Gilbishkosma: https://github.com/Gilbishkosma .. _@glasslion: https://github.com/glasslion +.. _@goldhand: https://github.com/goldhand .. _@hackebrot: https://github.com/hackebrot .. _@hairychris: https://github.com/hairychris +.. _@hanaquadara: https://github.com/hanaquadara .. _@hendrikschneider: https://github.com/hendrikschneider .. _@hjwp: https://github.com/hjwp +.. _@howiezhao: https://github.com/howiezhao .. _@IanLee1521: https://github.com/IanLee1521 +.. _@ikhomutov: https://github.com/ikhomutov .. _@ikkebr: https://github.com/ikkebr +.. _@Isaac12x: https://github.com/Isaac12x .. _@iynaix: https://github.com/iynaix +.. _@jangeador: https://github.com/jangeador .. _@jazztpt: https://github.com/jazztpt +.. _@jcass77: https://github.com/jcass77 +.. _@jeromecaisip: https://github.com/jeromecaisip .. _@jleclanche: https://github.com/jleclanche .. _@juliocc: https://github.com/juliocc .. _@jvanbrug: https://github.com/jvanbrug .. _@ka7eh: https://github.com/ka7eh .. _@kaidokert: https://github.com/kaidokert .. _@kappataumu: https://github.com/kappataumu +.. _@keithjeb: https://github.com/keithjeb .. _@kevgathuku: https://github.com/kevgathuku +.. _@keyvanm: https://github.com/keyvanm .. _@knitatoms: https://github.com/knitatoms .. 
_@krzysztofzuraw: https://github.com/krzysztofzuraw -.. _@msaizar: https://github.com/msaizar +.. _@leollon: https://github.com/leollon .. _@MathijsHoogland: https://github.com/MathijsHoogland +.. _@mapx: https://github.com/mapx .. _@mattayes: https://github.com/mattayes .. _@menzenski: https://github.com/menzenski -.. _@mostaszewski: https://github.com/mostaszewski .. _@mfwarren: https://github.com/mfwarren +.. _@MightySCollins: https://github.com/MightySCollins .. _@mimischi: https://github.com/mimischi +.. _@minho42: https://github.com/minho42 .. _@mjsisley: https://github.com/mjsisley -.. _@myilmaz: https://github.com/myilmaz +.. _@mknapper1: https://github.com/mknapper1 +.. _@morenoh149: https://github.com/morenoh149 +.. _@mostaszewski: https://github.com/mostaszewski .. _@mozillazg: https://github.com/mozillazg +.. _@mrcoles: https://github.com/mrcoles +.. _@msaizar: https://github.com/msaizar +.. _@msamoylov: https://github.com/msamoylov +.. _@myilmaz: https://github.com/myilmaz +.. _@nicolas471: https://github.com/nicolas471 .. _@noisy: https://github.com/noisy .. _@originell: https://github.com/originell .. _@oubiga: https://github.com/oubiga .. _@parbhat: https://github.com/parbhat +.. _@pchiquet: https://github.com/pchiquet +.. _@phiberjenz: https://github.com/phiberjenz +.. _@purplediane: https://github.com/purplediane .. _@raonyguimaraes: https://github.com/raonyguimaraes .. _@reggieriser: https://github.com/reggieriser .. _@rm--: https://github.com/rm-- +.. _@rolep: https://github.com/rolep .. _@romanosipenko: https://github.com/romanosipenko +.. _@saschalalala: https://github.com/saschalalala .. _@shireenrao: https://github.com/shireenrao .. _@show0k: https://github.com/show0k .. _@shultz: https://github.com/shultz .. _@siauPatrick: https://github.com/siauPatrick +.. _@sladinji: https://github.com/sladinji .. _@slafs: https://github.com/slafs .. _@ssteinerX: https://github.com/ssteinerx +.. _@step21: https://github.com/step21 .. _@stepmr: https://github.com/stepmr .. _@suledev: https://github.com/suledev .. _@takkaria: https://github.com/takkaria +.. _@tanoabeleyra: https://github.com/tanoabeleyra +.. _@timclaessens: https://github.com/timclaessens .. _@timfreund: https://github.com/timfreund +.. _@townie: https://github.com/townie .. _@Travistock: https://github.com/Tavistock .. _@trungdong: https://github.com/trungdong .. _@Tubo: https://github.com/tubo +.. _@umrashrf: https://github.com/umrashrf .. _@viviangb: https://github.com/viviangb +.. _@vladdoster: https://github.com/vladdoster .. _@xpostudio4: https://github.com/xpostudio4 +.. _@yrchen: https://github.com/yrchen .. _@yunti: https://github.com/yunti .. _@zcho: https://github.com/zcho -.. _@phiberjenz: https://github.com/phiberjenz -.. _@sladinji: https://github.com/sladinji -.. _@andresgz: https://github.com/andresgz -.. _@jangeador: https://github.com/jangeador -.. _@townie: https://github.com/townie -.. _@MightySCollins: https://github.com/MightySCollins -.. _@dadokkio: https://github.com/dadokkio -.. _@bwarren2: https://github.com/bwarren2 -.. _@bertdemiranda: https://github.com/bertdemiranda -.. _@brentpayne: https://github.com/brentpayne -.. _@afrowave: https://github.com/afrowave -.. _@pchiquet: https://github.com/pchiquet -.. _@delneg: https://github.com/delneg -.. _@purplediane: https://github.com/purplediane -.. _@umrashrf: https://github.com/umrashrf -.. _@ahhda: https://github.com/ahhda -.. _@keithjeb: https://github.com/keithjeb -.. 
_@btknu: https://github.com/btknu + Special Thanks ~~~~~~~~~~~~~~ diff --git a/README.rst b/README.rst index 37f36204..6e534b0f 100644 --- a/README.rst +++ b/README.rst @@ -9,8 +9,8 @@ Cookiecutter Django :target: https://pyup.io/repos/github/pydanny/cookiecutter-django/ :alt: Updates -.. image:: https://badges.gitter.im/Join Chat.svg - :target: https://gitter.im/pydanny/cookiecutter-django?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge +.. image:: https://img.shields.io/badge/cookiecutter-Join%20on%20Slack-green?style=flat&logo=slack + :target: https://join.slack.com/t/cookie-cutter/shared_invite/enQtNzI0Mzg5NjE5Nzk5LTRlYWI2YTZhYmQ4YmU1Y2Q2NmE1ZjkwOGM0NDQyNTIwY2M4ZTgyNDVkNjMxMDdhZGI5ZGE5YmJjM2M3ODJlY2U .. image:: https://www.codetriage.com/pydanny/cookiecutter-django/badges/users.svg :target: https://www.codetriage.com/pydanny/cookiecutter-django @@ -36,10 +36,10 @@ production-ready Django projects quickly. Features --------- -* For Django 2.0 -* Works with Python 3.6 +* For Django 2.2 +* Works with Python 3.7 * Renders Django projects with 100% starting test coverage -* Twitter Bootstrap_ v4.1.1 (`maintained Foundation fork`_ also available) +* Twitter Bootstrap_ v4 (`maintained Foundation fork`_ also available) * 12-Factor_ based settings via django-environ_ * Secure by default. We believe in SSL. * Optimized development and production settings @@ -47,12 +47,13 @@ Features * Comes with custom user model ready to go * Optional custom static build using Gulp and livereload * Send emails via Anymail_ (using Mailgun_ by default, but switchable) -* Media storage using Amazon S3 -* Docker support using docker-compose_ for development and production (using Caddy_ with LetsEncrypt_ support) +* Media storage using Amazon S3 or Google Cloud Storage +* Docker support using docker-compose_ for development and production (using Traefik_ with LetsEncrypt_ support) * Procfile_ for deploying to Heroku * Instructions for deploying to PythonAnywhere_ -* Run tests with unittest or py.test +* Run tests with unittest or pytest * Customizable PostgreSQL version +* Default integration with pre-commit_ for identifying simple issues before submission to code review .. _`maintained Foundation fork`: https://github.com/Parbhat/cookiecutter-django-foundation @@ -62,7 +63,7 @@ Optional Integrations *These features can be enabled during initial project setup.* -* Serve static files from Amazon S3 or Whitenoise_ +* Serve static files from Amazon S3, Google Cloud Storage or Whitenoise_ * Configuration for Celery_ and Flower_ (the latter in Docker setup only) * Integration with MailHog_ for local email testing * Integration with Sentry_ for error logging @@ -82,15 +83,16 @@ Optional Integrations .. _Sentry: https://sentry.io/welcome/ .. _docker-compose: https://github.com/docker/compose .. _PythonAnywhere: https://www.pythonanywhere.com/ -.. _Caddy: https://caddyserver.com/ +.. _Traefik: https://traefik.io/ .. _LetsEncrypt: https://letsencrypt.org/ +.. _pre-commit: https://github.com/pre-commit/pre-commit Constraints ----------- * Only maintained 3rd party libraries are used. -* Uses PostgreSQL everywhere (9.2+) -* Environment variables for configuration (This won't work with Apache/mod_wsgi except on AWS ELB). +* Uses PostgreSQL everywhere (9.4 - 11.3) +* Environment variables for configuration (This won't work with Apache/mod_wsgi). Support this Project! 
---------------------- @@ -106,7 +108,7 @@ Projects that provide financial support to the maintainers: Two Scoops of Django 1.11 ~~~~~~~~~~~~~~~~~~~~~~~~~ -.. image:: https://cdn.shopify.com/s/files/1/0304/6901/products/tsd-111-alpha_medium.jpg?v=1499531513 +.. image:: https://cdn.shopify.com/s/files/1/0304/6901/products/2017-06-29-tsd11-sticker-02.png :name: Two Scoops of Django 1.11 Cover :align: center :alt: Two Scoops of Django @@ -155,7 +157,7 @@ Answer the prompts with your own desired options_. For example:: project_slug [reddit_clone]: reddit author_name [Daniel Roy Greenfeld]: Daniel Greenfeld email [you@example.com]: pydanny@gmail.com - description [A short description of the project.]: A reddit clone. + description [Behold My Awesome Project!]: A reddit clone. domain_name [example.com]: myreddit.com version [0.1.0]: 0.0.1 timezone [UTC]: America/Los_Angeles @@ -169,18 +171,21 @@ Answer the prompts with your own desired options_. For example:: use_heroku [n]: y use_compressor [n]: y Select postgresql_version: - 1 - 10.3 - 2 - 10.2 - 3 - 10.1 - 4 - 9.6 - 5 - 9.5 - 6 - 9.4 - 7 - 9.3 - Choose from 1, 2, 3, 4 [1]: 1 + 1 - 11.3 + 2 - 10.8 + 3 - 9.6 + 4 - 9.5 + 5 - 9.4 + Choose from 1, 2, 3, 4, 5 [1]: 1 Select js_task_runner: 1 - None 2 - Gulp Choose from 1, 2 [1]: 1 + Select cloud_provider: + 1 - AWS + 2 - GCP + 3 - None + Choose from 1, 2, 3 [1]: 1 custom_bootstrap_compilation [n]: n Select open_source_license: 1 - MIT @@ -221,11 +226,11 @@ Community * Have questions? **Before you ask questions anywhere else**, please post your question on `Stack Overflow`_ under the *cookiecutter-django* tag. We check there periodically for questions. * If you think you found a bug or want to request a feature, please open an issue_. -* For anything else, you can chat with us on `Gitter`_. +* For anything else, you can chat with us on `Slack`_. .. _`Stack Overflow`: http://stackoverflow.com/questions/tagged/cookiecutter-django .. _`issue`: https://github.com/pydanny/cookiecutter-django/issues -.. _`Gitter`: https://gitter.im/pydanny/cookiecutter-django?utm_source=badge&utm_medium=badge&utm_campaign=pr-badge&utm_content=badge +.. _`Slack`: https://join.slack.com/t/cookie-cutter/shared_invite/enQtNzI0Mzg5NjE5Nzk5LTRlYWI2YTZhYmQ4YmU1Y2Q2NmE1ZjkwOGM0NDQyNTIwY2M4ZTgyNDVkNjMxMDdhZGI5ZGE5YmJjM2M3ODJlY2U For Readers of Two Scoops of Django -------------------------------------------- diff --git a/cookiecutter.json b/cookiecutter.json index b5dda0c7..4e77d110 100644 --- a/cookiecutter.json +++ b/cookiecutter.json @@ -18,20 +18,22 @@ "use_pycharm": "n", "use_docker": "n", "postgresql_version": [ - "10.5", - "10.4", - "10.3", - "10.2", - "10.1", + "11.3", + "10.8", "9.6", "9.5", - "9.4", - "9.3" + "9.4" ], "js_task_runner": [ "None", "Gulp" ], + "cloud_provider": [ + "AWS", + "GCP", + "None" + ], + "use_drf": "n", "custom_bootstrap_compilation": "n", "use_compressor": "n", "use_celery": "n", @@ -39,7 +41,11 @@ "use_sentry": "n", "use_whitenoise": "n", "use_heroku": "n", - "use_travisci": "n", + "ci_tool": [ + "None", + "Travis", + "Gitlab" + ], "keep_local_envs_in_vcs": "y", "debug": "n" diff --git a/docs/conf.py b/docs/conf.py index e3ddae9a..469aa12d 100644 --- a/docs/conf.py +++ b/docs/conf.py @@ -42,7 +42,7 @@ master_doc = "index" # General information about the project. 
project = "Cookiecutter Django" -copyright = "2013-2018, Daniel Roy Greenfeld".format(now.year) +copyright = "2013-{}, Daniel Roy Greenfeld".format(now.year) # The version info for the project you're documenting, acts as replacement for # |version| and |release|, also used in various other places throughout the diff --git a/docs/deployment-on-heroku.rst b/docs/deployment-on-heroku.rst index 09953cf8..53e98037 100644 --- a/docs/deployment-on-heroku.rst +++ b/docs/deployment-on-heroku.rst @@ -3,6 +3,9 @@ Deployment on Heroku .. index:: Heroku +Commands to run +--------------- + Run these commands to deploy the project to Heroku: .. code-block:: bash @@ -17,31 +20,28 @@ Run these commands to deploy the project to Heroku: heroku addons:create heroku-redis:hobby-dev - # If using mailgun: heroku addons:create mailgun:starter - heroku addons:create sentry:f1 - heroku config:set PYTHONHASHSEED=random - + heroku config:set WEB_CONCURRENCY=4 - + heroku config:set DJANGO_DEBUG=False heroku config:set DJANGO_SETTINGS_MODULE=config.settings.production heroku config:set DJANGO_SECRET_KEY="$(openssl rand -base64 64)" - - # Generating a 32 character-long random string without any of the visually similiar characters "IOl01": + + # Generating a 32 character-long random string without any of the visually similar characters "IOl01": heroku config:set DJANGO_ADMIN_URL="$(openssl rand -base64 4096 | tr -dc 'A-HJ-NP-Za-km-z2-9' | head -c 32)/" - + # Set this to your Heroku app url, e.g. 'bionic-beaver-28392.herokuapp.com' heroku config:set DJANGO_ALLOWED_HOSTS= - + # Assign with AWS_ACCESS_KEY_ID heroku config:set DJANGO_AWS_ACCESS_KEY_ID= - + # Assign with AWS_SECRET_ACCESS_KEY heroku config:set DJANGO_AWS_SECRET_ACCESS_KEY= - + # Assign with AWS_STORAGE_BUCKET_NAME heroku config:set DJANGO_AWS_STORAGE_BUCKET_NAME= @@ -52,3 +52,70 @@ Run these commands to deploy the project to Heroku: heroku run python manage.py check --deploy heroku open + + +.. warning:: + + .. include:: mailgun.rst + + +Optional actions +---------------- + +Celery +++++++ + +Celery requires a few extra environment variables to be ready operational. Also, the worker is created, +it's in the ``Procfile``, but is turned off by default: + +.. code-block:: bash + + # Set the broker URL to Redis + heroku config:set CELERY_BROKER_URL=`heroku config:get REDIS_URL` + # Scale dyno to 1 instance + heroku ps:scale worker=1 + +Sentry +++++++ + +If you're opted for Sentry error tracking, you can either install it through the `Sentry add-on`_: + +.. code-block:: bash + + heroku addons:create sentry:f1 + + +Or add the DSN for your account, if you already have one: + +.. code-block:: bash + + heroku config:set SENTRY_DSN=https://xxxx@sentry.io/12345 + +.. _Sentry add-on: https://elements.heroku.com/addons/sentry + + +Gulp & Bootstrap compilation +++++++++++++++++++++++++++++ + +If you've opted for a custom bootstrap build, you'll most likely need to setup +your app to use `multiple buildpacks`_: one for Python & one for Node.js: + +.. code-block:: bash + + heroku buildpacks:add --index 1 heroku/nodejs + +At time of writing, this should do the trick: during deployment, +the Heroku should run ``npm install`` and then ``npm build``, +which runs Gulp in cookiecutter-django. + +If things don't work, please refer to the Heroku docs. + +.. 
_multiple buildpacks: https://devcenter.heroku.com/articles/using-multiple-buildpacks-for-an-app + +About Heroku & Docker +--------------------- + +Although Heroku has some sort of `Docker support`_, it's not supported by cookiecutter-django. +We invite you to follow Heroku documentation about it. + +.. _Docker support: https://devcenter.heroku.com/articles/build-docker-images-heroku-yml diff --git a/docs/deployment-on-pythonanywhere.rst b/docs/deployment-on-pythonanywhere.rst index ea25b3ae..4738d5a5 100644 --- a/docs/deployment-on-pythonanywhere.rst +++ b/docs/deployment-on-pythonanywhere.rst @@ -29,13 +29,13 @@ Once you've been through this one-off config, future deployments are much simple Getting your code and dependencies installed on PythonAnywhere -------------------------------------------------------------- -Make sure your project is fully commited and pushed up to Bitbucket or Github or wherever it may be. Then, log into your PythonAnywhere account, open up a **Bash** console, clone your repo, and create a virtualenv: +Make sure your project is fully committed and pushed up to Bitbucket or Github or wherever it may be. Then, log into your PythonAnywhere account, open up a **Bash** console, clone your repo, and create a virtualenv: .. code-block:: bash git clone # you can also use hg cd my-project-name - mkvirtualenv --python=/usr/bin/python3.6 my-project-name + mkvirtualenv --python=/usr/bin/python3.7 my-project-name pip install -r requirements/production.txt # may take a few minutes @@ -153,7 +153,7 @@ Back on the Web tab, hit **Reload**, and your app should be live! **NOTE:** *you may see security warnings until you set up your SSL certificates. If you -want to supress them temporarily, set DJANGO_SECURE_SSL_REDIRECT to blank. Follow +want to suppress them temporarily, set DJANGO_SECURE_SSL_REDIRECT to blank. Follow the instructions here to get SSL set up: https://help.pythonanywhere.com/pages/SSLOwnDomains/* diff --git a/docs/deployment-with-docker.rst b/docs/deployment-with-docker.rst index f6e21e82..0df50ff4 100644 --- a/docs/deployment-with-docker.rst +++ b/docs/deployment-with-docker.rst @@ -7,8 +7,8 @@ Deployment with Docker Prerequisites ------------- -* Docker 1.10+. -* Docker Compose 1.6+ +* Docker 17.05+. +* Docker Compose 1.17+ Understanding the Docker Compose Setup @@ -19,7 +19,7 @@ Before you begin, check out the ``production.yml`` file in the root of this proj * ``django``: your application running behind ``Gunicorn``; * ``postgres``: PostgreSQL database with the application's relational data; * ``redis``: Redis instance for caching; -* ``caddy``: Caddy web server with HTTPS on by default. +* ``traefik``: Traefik reverse proxy with HTTPS on by default. Provided you have opted for Celery (via setting ``use_celery`` to ``y``) there are three more services: @@ -35,7 +35,15 @@ Configuring the Stack The majority of services above are configured through the use of environment variables. Just check out :ref:`envs` and you will know the drill. -To obtain logs and information about crashes in a production setup, make sure that you have access to an external Sentry instance (e.g. by creating an account with `sentry.io`_), and set the ``SENTRY_DSN`` variable. +To obtain logs and information about crashes in a production setup, make sure that you have access to an external Sentry instance (e.g. by creating an account with `sentry.io`_), and set the ``SENTRY_DSN`` variable. Logs of level `logging.ERROR` are sent as Sentry events. 
Therefore, in order to send a Sentry event use: + +.. code-block:: python + + import logging + logging.error("This event is sent to Sentry", extra={"": ""}) + +The `extra` parameter allows you to send additional information about the context of this error. + You will probably also need to setup the Mail backend, for example by adding a `Mailgun`_ API key and a `Mailgun`_ sender domain, otherwise, the account creation view will crash and result in a 500 error when the backend attempts to send an email to the account owner. @@ -43,6 +51,11 @@ You will probably also need to setup the Mail backend, for example by adding a ` .. _Mailgun: https://mailgun.com +.. warning:: + + .. include:: mailgun.rst + + Optional: Use AWS IAM Role for EC2 instance ------------------------------------------- @@ -63,11 +76,11 @@ It is always better to deploy a site behind HTTPS and will become crucial as the * Access to the Django admin is set up by default to require HTTPS in production or once *live*. -The Caddy web server used in the default configuration will get you a valid certificate from Lets Encrypt and update it automatically. All you need to do to enable this is to make sure that your DNS records are pointing to the server Caddy runs on. +The Traefik reverse proxy used in the default configuration will get you a valid certificate from Lets Encrypt and update it automatically. All you need to do to enable this is to make sure that your DNS records are pointing to the server Traefik runs on. -You can read more about this here at `Automatic HTTPS`_ in the Caddy docs. +You can read more about this feature and how to configure it, at `Automatic HTTPS`_ in the Traefik docs. -.. _Automatic HTTPS: https://caddyserver.com/docs/automatic-https +.. _Automatic HTTPS: https://docs.traefik.io/configuration/acme/ (Optional) Postgres Data Volume Modifications @@ -112,7 +125,7 @@ If you want to scale your application, run:: docker-compose -f production.yml scale django=4 docker-compose -f production.yml scale celeryworker=2 -.. warning:: don't try to scale ``postgres``, ``celerybeat``, or ``caddy``. +.. warning:: don't try to scale ``postgres``, ``celerybeat``, or ``traefik``. To see how your containers are doing run:: @@ -139,8 +152,10 @@ If you are using ``supervisor``, you can use this file as a starting point:: Move it to ``/etc/supervisor/conf.d/{{cookiecutter.project_slug}}.conf`` and run:: supervisorctl reread + supervisorctl update supervisorctl start {{cookiecutter.project_slug}} For status check, run:: supervisorctl status + diff --git a/docs/developing-locally-docker.rst b/docs/developing-locally-docker.rst index 895140f9..09e68498 100644 --- a/docs/developing-locally-docker.rst +++ b/docs/developing-locally-docker.rst @@ -6,6 +6,12 @@ Getting Up and Running Locally With Docker The steps below will get you up and running with a local development environment. All of these commands assume you are in the root of your generated project. +.. note:: + + If you're new to Docker, please be aware that some resources are cached system-wide + and might reappear if you generate a project multiple times with the same name (e.g. + :ref:`this issue with Postgres `). + Prerequisites ------------- @@ -17,17 +23,6 @@ Prerequisites .. _`installation guide`: https://docs.docker.com/compose/install/ -Attention, Windows Users ------------------------- - -Currently PostgreSQL (``psycopg2`` python package) is not installed inside Docker containers for Windows users, while it is required by the generated Django project. 
To fix this, add ``psycopg2`` to the list of requirements inside ``requirements/base.txt``:: - - # Python-PostgreSQL Database Adapter - psycopg2==2.6.2 - -Doing this will prevent the project from being installed in an Windows-only environment (thus without usage of Docker). If you want to use this project without Docker, make sure to remove ``psycopg2`` from the requirements again. - - Build the Stack --------------- @@ -105,7 +100,6 @@ The most important thing for us here now is ``env_file`` section enlisting ``./. │   ├── .django │   └── .postgres └── .production - ├── .caddy ├── .django └── .postgres @@ -120,7 +114,7 @@ Consider the aforementioned ``.envs/.local/.postgres``: :: POSTGRES_USER=XgOWtQtJecsAbaIyslwGvFvPawftNaqO POSTGRES_PASSWORD=jSljDz4whHuwO3aJIgVBrqEml5Ycbghorep4uVJ4xjDYQu0LfuTZdctj7y0YcCLu -The three envs we are presented with here are ``POSTGRES_DB``, ``POSTGRES_USER``, and ``POSTGRES_PASSWORD`` (by the way, their values have also been generated for you). You might have figured out already where these definitions will end up; it's all the same with ``django`` and ``caddy`` service container envs. +The three envs we are presented with here are ``POSTGRES_DB``, ``POSTGRES_USER``, and ``POSTGRES_PASSWORD`` (by the way, their values have also been generated for you). You might have figured out already where these definitions will end up; it's all the same with ``django`` service container envs. One final touch: should you ever need to merge ``.envs/production/*`` in a single ``.env`` run the ``merge_production_dotenvs_in_dotenv.py``: :: diff --git a/docs/developing-locally.rst b/docs/developing-locally.rst index 3434f68b..7a58d099 100644 --- a/docs/developing-locally.rst +++ b/docs/developing-locally.rst @@ -9,7 +9,7 @@ Setting Up Development Environment Make sure to have the following on your host: -* Python 3.6 +* Python 3.7 * PostgreSQL_. * Redis_, if using Celery First things first. #. Create a virtualenv: :: - $ python3.6 -m venv + $ python3.7 -m venv #. Activate the virtualenv you have just created: :: @@ -26,6 +26,12 @@ First things first. #. Install development requirements: :: $ pip install -r requirements/local.txt + $ pre-commit install + + .. note:: + + ``pre-commit`` is included in the generated project by default. + For details on ``pre-commit``, see the `pre-commit website <https://pre-commit.com/>`_. #. Create a new PostgreSQL database using createdb_: :: @@ -120,12 +126,12 @@ In production, we have Mailgun_ configured to have your back! Celery ------ + If the project is configured to use Celery as a task scheduler then by default tasks are set to run on the main thread -when developing locally. If you have the appropriate setup on your local machine then set +when developing locally. If you have the appropriate setup on your local machine then set the following +in ``config/settings/local.py``:: -CELERY_TASK_ALWAYS_EAGER = False - -in /config/settings/local.py + CELERY_TASK_ALWAYS_EAGER = False Sass Compilation & Live Reloading diff --git a/docs/docker-postgres-backups.rst b/docs/docker-postgres-backups.rst index c1a8a5e0..6ccb7cf1 100644 --- a/docs/docker-postgres-backups.rst +++ b/docs/docker-postgres-backups.rst @@ -85,3 +85,11 @@ You will see something like :: # ... ALTER TABLE SUCCESS: The 'my_project' database has been restored from the '/backups/backup_2018_03_13T09_05_07.sql.gz' backup. + + +Backup to Amazon S3 +---------------------------------- +For uploading your backups to Amazon S3, you can use the aws cli container.
There is an upload command for uploading the postgres /backups directory recursively and there is a download command for downloading a specific backup. The default S3 environment variables are used. :: + + $ docker-compose -f production.yml run --rm awscli upload + $ docker-compose -f production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz diff --git a/docs/document.rst b/docs/document.rst new file mode 100644 index 00000000..7207e357 --- /dev/null +++ b/docs/document.rst @@ -0,0 +1,45 @@ +.. _document: + +Document +========= + +This project uses Sphinx_ documentation generator. +After you have set up to `develop locally`_, run the following commands to generate the HTML documentation: :: + + $ sphinx-build docs/ docs/_build/html/ + +If you set up your project to `develop locally with docker`_, run the following command: :: + + $ docker-compose -f local.yml run --rm django sphinx-build docs/ docs/_build/html/ + +Generate API documentation +---------------------------- + +Sphinx can automatically generate documentation from docstrings, to enable this feature, follow these steps: + +1. Add Sphinx extension in ``docs/conf.py`` file, like below: :: + + extensions = [ + 'sphinx.ext.autodoc', + ] + +2. Uncomment the following lines in the ``docs/conf.py`` file: :: + + # import django + # sys.path.insert(0, os.path.abspath('..')) + # os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local") + # django.setup() + +3. Run the following command: :: + + $ sphinx-apidoc -f -o ./docs/modules/ ./tpub/ migrations/* + + If you set up your project to `develop locally with docker`_, run the following command: :: + + $ docker-compose -f local.yml run --rm django sphinx-apidoc -f -o ./docs/modules ./tpub/ migrations/* + +4. Regenerate HTML documentation as written above. + +.. _Sphinx: https://www.sphinx-doc.org/en/master/index.html +.. _develop locally: ./developing-locally.html +.. _develop locally with docker: ./developing-locally-docker.html diff --git a/docs/index.rst b/docs/index.rst index 5cb07b4b..8e0d04aa 100644 --- a/docs/index.rst +++ b/docs/index.rst @@ -18,6 +18,7 @@ Contents: settings linters testing + document deployment-on-pythonanywhere deployment-on-heroku deployment-with-docker diff --git a/docs/linters.rst b/docs/linters.rst index e59ff0df..2d623218 100644 --- a/docs/linters.rst +++ b/docs/linters.rst @@ -25,7 +25,7 @@ This is included in flake8's checks, but you can also run it separately to see a The config for pylint is located in .pylintrc. It specifies: -* Use the pylint_common and pylint_django plugins. If using Celery, also use pylint_celery. +* Use the pylint_django plugin. If using Celery, also use pylint_celery. * Set max line length to 120 chars * Disable linting messages for missing docstring and invalid name * max-parents=13 diff --git a/docs/mailgun.rst b/docs/mailgun.rst new file mode 100644 index 00000000..1f34e3c8 --- /dev/null +++ b/docs/mailgun.rst @@ -0,0 +1,13 @@ +If your email server used to send email isn't configured properly (Mailgun by default), +attempting to send an email will cause an Internal Server Error. + +By default, django-allauth is setup to `have emails verifications mandatory`_, +which means it'll send a verification email when an unverified user tries to +log-in or when someone tries to sign-up. + +This may happen just after you've setup your Mailgun account, which is running in a +sandbox subdomain by default. Either add your email to the list of authorized recipients +or verify your domain. + + +.. 
_have emails verifications mandatory: https://django-allauth.readthedocs.io/en/latest/configuration.html?highlight=ACCOUNT_EMAIL_VERIFICATION diff --git a/docs/project-generation-options.rst b/docs/project-generation-options.rst index a5483797..ae47b097 100644 --- a/docs/project-generation-options.rst +++ b/docs/project-generation-options.rst @@ -49,13 +49,11 @@ use_docker: postgresql_version: Select a PostgreSQL_ version to use. The choices are: - 1. 10.3 - 2. 10.2 - 3. 10.1 - 4. 9.6 - 5. 9.5 - 6. 9.4 - 7. 9.3 + 1. 11.3 + 2. 10.8 + 3. 9.6 + 4. 9.5 + 5. 9.4 js_task_runner: Select a JavaScript task runner. The choices are: @@ -63,6 +61,15 @@ js_task_runner: 1. None 2. Gulp_ +cloud_provider: + Select a cloud provider for static & media files. The choices are: + + 1. AWS_ + 2. GCP_ + 3. None + + Note that if you choose no cloud provider, media files won't work. + custom_bootstrap_compilation: Indicates whether the project should support Bootstrap recompilation via the selected JavaScript task runner's task. This can be useful @@ -87,13 +94,17 @@ use_heroku: Indicates whether the project should be configured so as to be deployable to Heroku_. -use_travisci: - Indicates whether the project should be configured to use `Travis CI`_. +ci_tool: + Select a CI tool for running tests. The choices are: + + 1. None + 2. Travis_ + 3. Gitlab_ keep_local_envs_in_vcs: Indicates whether the project's ``.envs/.local/`` should be kept in VCS (comes in handy when working in teams where local environment reproducibility - is strongly encouraged). + is strongly encouraged). Note: .env(s) are only utilized when Docker Compose and/or Heroku support is enabled. debug: @@ -115,6 +126,9 @@ debug: .. _Gulp: https://github.com/gulpjs/gulp +.. _AWS: https://aws.amazon.com/s3/ +.. _GCP: https://cloud.google.com/storage/ + .. _Django Compressor: https://github.com/django-compressor/django-compressor .. _Celery: https://github.com/celery/celery @@ -128,3 +142,6 @@ debug: .. _Heroku: https://github.com/heroku/heroku-buildpack-python .. _Travis CI: https://travis-ci.org/ + +.. 
_GitLab CI: https://docs.gitlab.com/ee/ci/ + diff --git a/docs/settings.rst b/docs/settings.rst index 26b161a0..e586c963 100644 --- a/docs/settings.rst +++ b/docs/settings.rst @@ -44,11 +44,14 @@ CELERY_BROKER_URL CELERY_BROKER_URL auto w/ Dock DJANGO_AWS_ACCESS_KEY_ID AWS_ACCESS_KEY_ID n/a raises error DJANGO_AWS_SECRET_ACCESS_KEY AWS_SECRET_ACCESS_KEY n/a raises error DJANGO_AWS_STORAGE_BUCKET_NAME AWS_STORAGE_BUCKET_NAME n/a raises error +DJANGO_AWS_S3_REGION_NAME AWS_S3_REGION_NAME n/a None +DJANGO_GCP_STORAGE_BUCKET_NAME GS_BUCKET_NAME n/a raises error +GOOGLE_APPLICATION_CREDENTIALS n/a n/a raises error SENTRY_DSN SENTRY_DSN n/a raises error -DJANGO_SENTRY_CLIENT SENTRY_CLIENT n/a raven.contrib.django.raven_compat.DjangoClient DJANGO_SENTRY_LOG_LEVEL SENTRY_LOG_LEVEL n/a logging.INFO -MAILGUN_API_KEY MAILGUN_ACCESS_KEY n/a raises error +MAILGUN_API_KEY MAILGUN_API_KEY n/a raises error MAILGUN_DOMAIN MAILGUN_SENDER_DOMAIN n/a raises error +MAILGUN_API_URL n/a n/a "https://api.mailgun.net/v3" ======================================= =========================== ============================================== ====================================================================== -------------------------- diff --git a/docs/testing.rst b/docs/testing.rst index 6ca21388..dd6fcb48 100644 --- a/docs/testing.rst +++ b/docs/testing.rst @@ -19,26 +19,26 @@ You will get a readout of the `users` app that has already been set up with test If you set up your project to `develop locally with docker`_, run the following command: :: - $ docker-compose -f local.yml run django pytest + $ docker-compose -f local.yml run --rm django pytest -Targetting particular apps for testing in ``docker`` follows a similar pattern as previously shown above. +Targeting particular apps for testing in ``docker`` follows a similar pattern as previously shown above. Coverage -------- You should build your tests to provide the highest level of **code coverage**. You can run the ``pytest`` with code ``coverage`` by typing in the following command: :: - $ docker-compose -f local.yml run django coverage run -m pytest + $ docker-compose -f local.yml run --rm django coverage run -m pytest Once the tests are complete, in order to see the code coverage, run the following command: :: - $ docker-compose -f local.yml run django coverage report + $ docker-compose -f local.yml run --rm django coverage report .. note:: - At the root of the project folder, you will find the `pytest.ini` file. You can use this to customize_ the ``pytest`` to your liking. + At the root of the project folder, you will find the `pytest.ini` file. You can use this to customize_ the ``pytest`` to your liking. - There is also the `.coveragerc`. This is the configuration file for the ``coverage`` tool. You can find out more about `configuring`_ ``coverage``. + There is also the `.coveragerc`. This is the configuration file for the ``coverage`` tool. You can find out more about `configuring`_ ``coverage``. .. seealso:: @@ -49,8 +49,8 @@ Once the tests are complete, in order to see the code coverage, run the followin Since this is a fresh install, and there are no tests built using the Python `unittest`_ library yet, you should get feedback that says there were no tests carried out. .. _Pytest: https://docs.pytest.org/en/latest/example/simple.html -.. _develop locally: ../developing-locally.rst -.. _develop locally with docker: ..../developing-locally-docker.rst +.. _develop locally: ./developing-locally.html +.. 
_develop locally with docker: ./developing-locally-docker.html .. _customize: https://docs.pytest.org/en/latest/customize.html .. _unittest: https://docs.python.org/3/library/unittest.html#module-unittest -.. _configuring: https://coverage.readthedocs.io/en/v4.5.x/config.html \ No newline at end of file +.. _configuring: https://coverage.readthedocs.io/en/v4.5.x/config.html diff --git a/docs/troubleshooting.rst b/docs/troubleshooting.rst index d0c0ba43..8aa1b1f9 100644 --- a/docs/troubleshooting.rst +++ b/docs/troubleshooting.rst @@ -3,12 +3,48 @@ Troubleshooting This page contains some advice about errors and problems commonly encountered during the development of Cookiecutter Django applications. +Server Error on sign-up/log-in +------------------------------ + +Make sure you have configured the mail backend (e.g. Mailgun) by adding the API key and sender domain + +.. include:: mailgun.rst + +.. _docker-postgres-auth-failed: + +Docker: Postgres authentication failed +-------------------------------------- + +Examples of logs:: + + postgres_1 | 2018-06-07 19:11:23.963 UTC [81] FATAL: password authentication failed for user "pydanny" + postgres_1 | 2018-06-07 19:11:23.963 UTC [81] DETAIL: Password does not match for user "pydanny". + postgres_1 | Connection matched pg_hba.conf line 95: "host all all all md5" + +If you recreate the project multiple times with the same name, Docker would preserve the volumes for the postgres container between projects. Here is what happens: + +#. You generate the project the first time. The .env postgres file is populated with the random password +#. You run the docker-compose and the containers are created. The postgres container creates the database based on the .env file credentials +#. You "regenerate" the project with the same name, so the postgres .env file is populated with a new random password +#. You run docker-compose. Since the names of the containers are the same, docker will try to start them (not create them from scratch i.e. it won't execute the Dockerfile to recreate the database). When this happens, it tries to start the database based on the new credentials which do not match the ones that the database was created with, and you get the error message above. + +To fix this, you can either: + +- Clear your project-related Docker cache with ``docker-compose -f local.yml down --volumes --rmi all``. +- Use the Docker volume sub-commands to find volumes (`ls`_) and remove them (`rm`_). +- Use the `prune`_ command to clear system-wide (use with care!). + +.. _ls: https://docs.docker.com/engine/reference/commandline/volume_ls/ +.. _rm: https://docs.docker.com/engine/reference/commandline/volume_rm/ +.. _prune: https://docs.docker.com/v17.09/engine/reference/commandline/system_prune/ + +Others +------ + #. ``project_slug`` must be a valid Python module name or you will have issues on imports. #. ``jinja2.exceptions.TemplateSyntaxError: Encountered unknown tag 'now'.``: please upgrade your cookiecutter version to >= 1.4 (see `#528`_) -#. Internal server error on user registration: make sure you have configured the mail backend (e.g. Mailgun) by adding the API key and sender domain - #. New apps not getting created in project root: This is the expected behavior, because cookiecutter-django does not change the way that django startapp works, you'll have to fix this manually (see `#1725`_) .. 
_#528: https://github.com/pydanny/cookiecutter-django/issues/528#issuecomment-212650373 diff --git a/hooks/post_gen_project.py b/hooks/post_gen_project.py index 45435dd0..5cc8c32f 100644 --- a/hooks/post_gen_project.py +++ b/hooks/post_gen_project.py @@ -32,10 +32,7 @@ DEBUG_VALUE = "debug" def remove_open_source_files(): - file_names = [ - "CONTRIBUTORS.txt", - "LICENSE", - ] + file_names = ["CONTRIBUTORS.txt", "LICENSE"] for file_name in file_names: os.remove(file_name) @@ -71,7 +68,10 @@ def remove_utility_files(): def remove_heroku_files(): file_names = ["Procfile", "runtime.txt", "requirements.txt"] for file_name in file_names: - if file_name == "requirements.txt" and "{{ cookiecutter.use_travisci }}".lower() == "y": + if ( + file_name == "requirements.txt" + and "{{ cookiecutter.ci_tool }}".lower() == "travis" + ): # don't remove the file if we are using travisci but not using heroku continue os.remove(file_name) @@ -89,14 +89,26 @@ def remove_packagejson_file(): os.remove(file_name) -def remove_celery_app(): - shutil.rmtree(os.path.join("{{ cookiecutter.project_slug }}", "taskapp")) +def remove_celery_files(): + file_names = [ + os.path.join("config", "celery_app.py"), + os.path.join("{{ cookiecutter.project_slug }}", "users", "tasks.py"), + os.path.join( + "{{ cookiecutter.project_slug }}", "users", "tests", "test_tasks.py" + ), + ] + for file_name in file_names: + os.remove(file_name) def remove_dottravisyml_file(): os.remove(".travis.yml") +def remove_dotgitlabciyml_file(): + os.remove(".gitlab-ci.yml") + + def append_to_project_gitignore(path): gitignore_file_path = ".gitignore" with open(gitignore_file_path, "a") as gitignore_file: @@ -183,11 +195,7 @@ def generate_postgres_user(debug=False): def set_postgres_user(file_path, value): - postgres_user = set_flag( - file_path, - "!!!SET POSTGRES_USER!!!", - value=value, - ) + postgres_user = set_flag(file_path, "!!!SET POSTGRES_USER!!!", value=value) return postgres_user @@ -205,9 +213,7 @@ def set_postgres_password(file_path, value=None): def set_celery_flower_user(file_path, value): celery_flower_user = set_flag( - file_path, - "!!!SET CELERY_FLOWER_USER!!!", - value=value, + file_path, "!!!SET CELERY_FLOWER_USER!!!", value=value ) return celery_flower_user @@ -230,11 +236,7 @@ def append_to_gitignore_file(s): gitignore_file.write(os.linesep) -def set_flags_in_envs( - postgres_user, - celery_flower_user, - debug=False, -): +def set_flags_in_envs(postgres_user, celery_flower_user, debug=False): local_django_envs_path = os.path.join(".envs", ".local", ".django") production_django_envs_path = os.path.join(".envs", ".production", ".django") local_postgres_envs_path = os.path.join(".envs", ".local", ".postgres") @@ -244,14 +246,22 @@ def set_flags_in_envs( set_django_admin_url(production_django_envs_path) set_postgres_user(local_postgres_envs_path, value=postgres_user) - set_postgres_password(local_postgres_envs_path, value=DEBUG_VALUE if debug else None) + set_postgres_password( + local_postgres_envs_path, value=DEBUG_VALUE if debug else None + ) set_postgres_user(production_postgres_envs_path, value=postgres_user) - set_postgres_password(production_postgres_envs_path, value=DEBUG_VALUE if debug else None) + set_postgres_password( + production_postgres_envs_path, value=DEBUG_VALUE if debug else None + ) set_celery_flower_user(local_django_envs_path, value=celery_flower_user) - set_celery_flower_password(local_django_envs_path, value=DEBUG_VALUE if debug else None) + set_celery_flower_password( + local_django_envs_path, 
value=DEBUG_VALUE if debug else None + ) set_celery_flower_user(production_django_envs_path, value=celery_flower_user) - set_celery_flower_password(production_django_envs_path, value=DEBUG_VALUE if debug else None) + set_celery_flower_password( + production_django_envs_path, value=DEBUG_VALUE if debug else None + ) def set_flags_in_settings_files(): @@ -269,6 +279,19 @@ def remove_celery_compose_dirs(): shutil.rmtree(os.path.join("compose", "production", "django", "celery")) +def remove_node_dockerfile(): + shutil.rmtree(os.path.join("compose", "local", "node")) + + +def remove_aws_dockerfile(): + shutil.rmtree(os.path.join("compose", "production", "aws")) + + +def remove_drf_starter_files(): + os.remove(os.path.join("config", "api_router.py")) + shutil.rmtree(os.path.join("{{cookiecutter.project_slug}}", "users", "api")) + + def main(): debug = "{{ cookiecutter.debug }}".lower() == "y" @@ -292,6 +315,12 @@ def main(): else: remove_docker_files() + if ( + "{{ cookiecutter.use_docker }}".lower() == "y" + and "{{ cookiecutter.cloud_provider}}".lower() != "aws" + ): + remove_aws_dockerfile() + if "{{ cookiecutter.use_heroku }}".lower() == "n": remove_heroku_files() @@ -302,8 +331,8 @@ def main(): if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y": print( INFO + ".env(s) are only utilized when Docker Compose and/or " - "Heroku support is enabled so keeping them does not " - "make sense given your current setup." + TERMINATOR + "Heroku support is enabled so keeping them does not " + "make sense given your current setup." + TERMINATOR ) remove_envs_and_associated_files() else: @@ -315,30 +344,29 @@ def main(): if "{{ cookiecutter.js_task_runner}}".lower() == "none": remove_gulp_files() remove_packagejson_file() - if ( - "{{ cookiecutter.js_task_runner }}".lower() != "none" - and "{{ cookiecutter.use_docker }}".lower() == "y" - ): + if "{{ cookiecutter.use_docker }}".lower() == "y": + remove_node_dockerfile() + + if "{{ cookiecutter.cloud_provider}}".lower() == "none": print( - WARNING - + "Docker and {} JS task runner ".format( - "{{ cookiecutter.js_task_runner }}".lower().capitalize() - ) - + "working together not supported yet. " - "You can continue using the generated project like you " - "normally would, however you would need to add a JS " - "task runner service to your Docker Compose configuration " - "manually." + TERMINATOR + WARNING + "You chose not to use a cloud provider, " + "media files won't be served in production." + TERMINATOR ) if "{{ cookiecutter.use_celery }}".lower() == "n": - remove_celery_app() + remove_celery_files() if "{{ cookiecutter.use_docker }}".lower() == "y": remove_celery_compose_dirs() - if "{{ cookiecutter.use_travisci }}".lower() == "n": + if "{{ cookiecutter.ci_tool }}".lower() != "travis": remove_dottravisyml_file() + if "{{ cookiecutter.ci_tool }}".lower() != "gitlab": + remove_dotgitlabciyml_file() + + if "{{ cookiecutter.use_drf }}".lower() == "n": + remove_drf_starter_files() + print(SUCCESS + "Project initialized, keep up the good work!" 
+ TERMINATOR) diff --git a/hooks/pre_gen_project.py b/hooks/pre_gen_project.py index b7f4dfbb..668a6e27 100644 --- a/hooks/pre_gen_project.py +++ b/hooks/pre_gen_project.py @@ -18,19 +18,24 @@ SUCCESS = "\x1b[1;32m [SUCCESS]: " project_slug = "{{ cookiecutter.project_slug }}" if hasattr(project_slug, "isidentifier"): - assert project_slug.isidentifier(), "'{}' project slug is not a valid Python identifier.".format( - project_slug - ) + assert ( + project_slug.isidentifier() + ), "'{}' project slug is not a valid Python identifier.".format(project_slug) -assert "\\" not in "{{ cookiecutter.author_name }}", "Don't include backslashes in author name." +assert ( + project_slug == project_slug.lower() +), "'{}' project slug should be all lowercase".format(project_slug) + +assert ( + "\\" not in "{{ cookiecutter.author_name }}" +), "Don't include backslashes in author name." if "{{ cookiecutter.use_docker }}".lower() == "n": python_major_version = sys.version_info[0] if python_major_version == 2: print( - WARNING + "Cookiecutter Django does not support Python 2. " - "Stability is guaranteed with Python 3.6+ only, " - "are you sure you want to proceed (y/n)? " + TERMINATOR + WARNING + "You're running cookiecutter under Python 2, but the generated " + "project requires Python 3.7+. Do you want to proceed (y/n)? " + TERMINATOR ) yes_options, no_options = frozenset(["y"]), frozenset(["n"]) while True: @@ -54,3 +59,12 @@ if "{{ cookiecutter.use_docker }}".lower() == "n": ) + TERMINATOR ) + +if ( + "{{ cookiecutter.use_whitenoise }}".lower() == "n" + and "{{ cookiecutter.cloud_provider }}" == "None" +): + print( + "You should either use Whitenoise or select a Cloud Provider to serve static files" + ) + sys.exit(1) diff --git a/pytest.ini b/pytest.ini index c5b30199..89aeb302 100644 --- a/pytest.ini +++ b/pytest.ini @@ -1,3 +1,7 @@ [pytest] +addopts = -x --tb=short python_paths = . norecursedirs = .tox .git */migrations/* */static/* docs venv */{{cookiecutter.project_slug}}/* +markers = + flake8: Run flake8 on all possible template combinations + black: Run black on all possible template combinations diff --git a/requirements.txt b/requirements.txt index 647bccae..afc125ad 100644 --- a/requirements.txt +++ b/requirements.txt @@ -1,14 +1,17 @@ -cookiecutter==1.6.0 +cookiecutter==1.7.0 sh==1.12.14 binaryornot==0.4.4 # Code quality # ------------------------------------------------------------------------------ -flake8==3.7.6 +black==19.10b0 +flake8==3.7.9 # Testing # ------------------------------------------------------------------------------ -tox==3.6.1 -pytest==4.3.1 -pytest-cookies==0.3.0 -pyyaml==5.1 +tox==3.14.3 +pytest==5.3.5 +pytest_cases==1.12.1 +pytest-cookies==0.4.0 +pytest-xdist==1.31.0 +pyyaml==5.3 diff --git a/setup.py b/setup.py index 65bcd8fc..33032009 100644 --- a/setup.py +++ b/setup.py @@ -10,10 +10,10 @@ except ImportError: # Our version ALWAYS matches the version of Django we support # If Django has a new release, we branch, tag, then update this setting after the tag. 
-version = "2.0.2" +version = "2.2.1" if sys.argv[-1] == "tag": - os.system('git tag -a %s -m "version %s"' % (version, version)) + os.system(f'git tag -a {version} -m "version {version}"') os.system("git push --tags") sys.exit() @@ -34,13 +34,13 @@ setup( classifiers=[ "Development Status :: 4 - Beta", "Environment :: Console", - "Framework :: Django :: 2.0", + "Framework :: Django :: 2.2", "Intended Audience :: Developers", "Natural Language :: English", "License :: OSI Approved :: BSD License", "Programming Language :: Python", "Programming Language :: Python :: 3", - "Programming Language :: Python :: 3.6", + "Programming Language :: Python :: 3.7", "Programming Language :: Python :: Implementation :: CPython", "Topic :: Software Development", ], diff --git a/tests/test_bare.sh b/tests/test_bare.sh new file mode 100755 index 00000000..7021a7e4 --- /dev/null +++ b/tests/test_bare.sh @@ -0,0 +1,26 @@ +#!/bin/sh +# this is a very simple script that tests the docker configuration for cookiecutter-django +# it is meant to be run from the root directory of the repository, eg: +# sh tests/test_docker.sh + +set -o errexit + +# install test requirements +pip install -r requirements.txt + +# create a cache directory +mkdir -p .cache/bare +cd .cache/bare + +# create the project using the default settings in cookiecutter.json +cookiecutter ../../ --no-input --overwrite-if-exists use_docker=n $@ +cd my_awesome_project + +# Install OS deps +sudo utility/install_os_dependencies.sh install + +# Install Python deps +pip install -r requirements/local.txt + +# run the project's tests +pytest diff --git a/tests/test_cookiecutter_generation.py b/tests/test_cookiecutter_generation.py index b2c235a8..40e6cf1b 100755 --- a/tests/test_cookiecutter_generation.py +++ b/tests/test_cookiecutter_generation.py @@ -1,12 +1,14 @@ import os import re -import sh -import yaml import pytest +from cookiecutter.exceptions import FailedHookException +from pytest_cases import pytest_fixture_plus +import sh +import yaml from binaryornot.check import is_binary -PATTERN = "{{(\s?cookiecutter)[.](.*?)}}" +PATTERN = r"{{(\s?cookiecutter)[.](.*?)}}" RE_OBJ = re.compile(PATTERN) @@ -24,6 +26,51 @@ def context(): } +@pytest_fixture_plus +@pytest.mark.parametrize("windows", ["y", "n"], ids=lambda yn: f"win:{yn}") +@pytest.mark.parametrize("use_docker", ["y", "n"], ids=lambda yn: f"docker:{yn}") +@pytest.mark.parametrize("use_celery", ["y", "n"], ids=lambda yn: f"celery:{yn}") +@pytest.mark.parametrize("use_mailhog", ["y", "n"], ids=lambda yn: f"mailhog:{yn}") +@pytest.mark.parametrize("use_sentry", ["y", "n"], ids=lambda yn: f"sentry:{yn}") +@pytest.mark.parametrize("use_compressor", ["y", "n"], ids=lambda yn: f"cmpr:{yn}") +@pytest.mark.parametrize("use_drf", ["y", "n"], ids=lambda yn: f"drf:{yn}") +@pytest.mark.parametrize( + "use_whitenoise,cloud_provider", + [ + ("y", "AWS"), + ("y", "GCP"), + ("y", "None"), + ("n", "AWS"), + ("n", "GCP"), + # no whitenoise + no cloud provider is not supported + ], + ids=lambda id: f"wnoise:{id[0]}-cloud:{id[1]}", +) +def context_combination( + windows, + use_docker, + use_celery, + use_mailhog, + use_sentry, + use_compressor, + use_whitenoise, + use_drf, + cloud_provider, +): + """Fixture that parametrize the function where it's used.""" + return { + "windows": windows, + "use_docker": use_docker, + "use_compressor": use_compressor, + "use_celery": use_celery, + "use_mailhog": use_mailhog, + "use_sentry": use_sentry, + "use_whitenoise": use_whitenoise, + "use_drf": use_drf, + "cloud_provider": 
cloud_provider, + } + + def build_files_list(root_dir): """Build a list containing absolute paths to the generated files.""" return [ @@ -48,8 +95,13 @@ def check_paths(paths): assert match is None, msg.format(path) -def test_default_configuration(cookies, context): - result = cookies.bake(extra_context=context) +def test_project_generation(cookies, context, context_combination): + """ + Test that project is generated and fully rendered. + + This is parametrized for each combination from ``context_combination`` fixture + """ + result = cookies.bake(extra_context={**context, **context_combination}) assert result.exit_code == 0 assert result.exception is None assert result.project.basename == context["project_slug"] @@ -60,27 +112,14 @@ def test_default_configuration(cookies, context): check_paths(paths) -@pytest.fixture(params=["use_mailhog", "use_celery", "windows"]) -def feature_context(request, context): - context.update({request.param: "y"}) - return context +@pytest.mark.flake8 +def test_flake8_passes(cookies, context_combination): + """ + Generated project should pass flake8. - -def test_enabled_features(cookies, feature_context): - result = cookies.bake(extra_context=feature_context) - assert result.exit_code == 0 - assert result.exception is None - assert result.project.basename == feature_context["project_slug"] - assert result.project.isdir() - - paths = build_files_list(str(result.project)) - assert paths - check_paths(paths) - - -def test_flake8_compliance(cookies): - """generated project should pass flake8""" - result = cookies.bake() + This is parametrized for each combination from ``context_combination`` fixture + """ + result = cookies.bake(extra_context=context_combination) try: sh.flake8(str(result.project)) @@ -88,8 +127,23 @@ def test_flake8_compliance(cookies): pytest.fail(e) +@pytest.mark.black +def test_black_passes(cookies, context_combination): + """ + Generated project should pass black. 
+ + This is parametrized for each combination from ``context_combination`` fixture + """ + result = cookies.bake(extra_context=context_combination) + + try: + sh.black("--check", "--diff", "--exclude", "migrations", f"{result.project}/") + except sh.ErrorReturnCode as e: + pytest.fail(e) + + def test_travis_invokes_pytest(cookies, context): - context.update({"use_travisci": "y"}) + context.update({"ci_tool": "Travis"}) result = cookies.bake(extra_context=context) assert result.exit_code == 0 @@ -97,8 +151,46 @@ assert result.project.basename == context["project_slug"] assert result.project.isdir() - with open(f'{result.project}/.travis.yml', 'r') as travis_yml: + with open(f"{result.project}/.travis.yml", "r") as travis_yml: try: - assert yaml.load(travis_yml)['script'] == ['pytest'] + assert yaml.load(travis_yml)["script"] == ["pytest"] except yaml.YAMLError as e: pytest.fail(e) + + +def test_gitlab_invokes_flake8_and_pytest(cookies, context): + context.update({"ci_tool": "Gitlab"}) + result = cookies.bake(extra_context=context) + + assert result.exit_code == 0 + assert result.exception is None + assert result.project.basename == context["project_slug"] + assert result.project.isdir() + + with open(f"{result.project}/.gitlab-ci.yml", "r") as gitlab_yml: + try: + gitlab_config = yaml.load(gitlab_yml) + assert gitlab_config["flake8"]["script"] == ["flake8"] + assert gitlab_config["pytest"]["script"] == ["pytest"] + except yaml.YAMLError as e: + pytest.fail(e) + + +@pytest.mark.parametrize("slug", ["project slug", "Project_Slug"]) +def test_invalid_slug(cookies, context, slug): + """Invalid slug should fail the pre-generation hook.""" + context.update({"project_slug": slug}) + + result = cookies.bake(extra_context=context) + + assert result.exit_code != 0 + assert isinstance(result.exception, FailedHookException) + + +def test_no_whitenoise_and_no_cloud_provider(cookies, context): + """It should not generate a project if neither whitenoise nor a cloud provider is set.""" + context.update({"use_whitenoise": "n", "cloud_provider": "None"}) + result = cookies.bake(extra_context=context) + + assert result.exit_code != 0 + assert isinstance(result.exception, FailedHookException) diff --git a/tests/test_docker.sh b/tests/test_docker.sh index eddfe98c..55771c14 100755 --- a/tests/test_docker.sh +++ b/tests/test_docker.sh @@ -3,6 +3,8 @@ # it is meant to be run from the root directory of the repository, eg: # sh tests/test_docker.sh +set -o errexit + # install test requirements pip install -r requirements.txt @@ -11,12 +13,15 @@ mkdir -p .cache/docker cd .cache/docker # create the project using the default settings in cookiecutter.json -cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y +cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y $@ cd my_awesome_project # run the project's type checks docker-compose -f local.yml run django mypy my_awesome_project +# Run black with --check option +docker-compose -f local.yml run django black --check --diff --exclude 'migrations' ./ + # run the project's tests docker-compose -f local.yml run django pytest diff --git a/tox.ini b/tox.ini index 040c8a41..1c83465c 100644 --- a/tox.ini +++ b/tox.ini @@ -1,7 +1,19 @@ [tox] skipsdist = true -envlist = py36 +envlist = py37,flake8,black,black-template [testenv] deps = -rrequirements.txt -commands = pytest {posargs:./tests} +commands = pytest -m "not flake8" -m "not black" {posargs:./tests} + +[testenv:flake8] +deps = -rrequirements.txt +commands 
= pytest -m flake8 {posargs:./tests} + +[testenv:black] +deps = -rrequirements.txt +commands = pytest -m black {posargs:./tests} + +[testenv:black-template] +deps = black +commands = black --check hooks tests setup.py docs diff --git a/{{cookiecutter.project_slug}}/.editorconfig b/{{cookiecutter.project_slug}}/.editorconfig index b19266bf..792dd3b0 100644 --- a/{{cookiecutter.project_slug}}/.editorconfig +++ b/{{cookiecutter.project_slug}}/.editorconfig @@ -13,10 +13,16 @@ indent_style = space indent_size = 4 [*.py] -line_length=120 -known_first_party={{ cookiecutter.project_slug }} -multi_line_output=3 -default_section=THIRDPARTY +line_length = 120 +known_first_party = {{ cookiecutter.project_slug }} +multi_line_output = 3 +default_section = THIRDPARTY +recursive = true +skip = venv/ +skip_glob = **/migrations/*.py +include_trailing_comma = true +force_grid_wrap = 0 +use_parentheses = true [*.{html,css,scss,json,yml}] indent_style = space diff --git a/{{cookiecutter.project_slug}}/.envs/.production/.caddy b/{{cookiecutter.project_slug}}/.envs/.production/.caddy deleted file mode 100644 index 83d7fc7a..00000000 --- a/{{cookiecutter.project_slug}}/.envs/.production/.caddy +++ /dev/null @@ -1,3 +0,0 @@ -# Caddy -# ------------------------------------------------------------------------------ -DOMAIN_NAME={{ cookiecutter.domain_name }} diff --git a/{{cookiecutter.project_slug}}/.envs/.production/.django b/{{cookiecutter.project_slug}}/.envs/.production/.django index 4175f894..2c2e94f2 100644 --- a/{{cookiecutter.project_slug}}/.envs/.production/.django +++ b/{{cookiecutter.project_slug}}/.envs/.production/.django @@ -16,13 +16,18 @@ DJANGO_SECURE_SSL_REDIRECT=False MAILGUN_API_KEY= DJANGO_SERVER_EMAIL= MAILGUN_DOMAIN= - +{% if cookiecutter.cloud_provider == 'AWS' %} # AWS # ------------------------------------------------------------------------------ DJANGO_AWS_ACCESS_KEY_ID= DJANGO_AWS_SECRET_ACCESS_KEY= DJANGO_AWS_STORAGE_BUCKET_NAME= - +{% elif cookiecutter.cloud_provider == 'GCP' %} +# GCP +# ------------------------------------------------------------------------------ +GOOGLE_APPLICATION_CREDENTIALS= +DJANGO_GCP_STORAGE_BUCKET_NAME= +{% endif %} # django-allauth # ------------------------------------------------------------------------------ DJANGO_ACCOUNT_ALLOW_REGISTRATION=True diff --git a/{{cookiecutter.project_slug}}/.gitignore b/{{cookiecutter.project_slug}}/.gitignore index 1874e9d9..cb8ad835 100644 --- a/{{cookiecutter.project_slug}}/.gitignore +++ b/{{cookiecutter.project_slug}}/.gitignore @@ -325,7 +325,6 @@ tags ### VirtualEnv template # Virtualenv -# http://iamzed.com/2009/05/07/a-primer-on-virtualenv/ [Bb]in [Ii]nclude [Ll]ib diff --git a/{{cookiecutter.project_slug}}/.gitlab-ci.yml b/{{cookiecutter.project_slug}}/.gitlab-ci.yml new file mode 100644 index 00000000..15ff73b1 --- /dev/null +++ b/{{cookiecutter.project_slug}}/.gitlab-ci.yml @@ -0,0 +1,33 @@ +stages: + - lint + - test + +variables: + POSTGRES_USER: '{{ cookiecutter.project_slug }}' + POSTGRES_PASSWORD: '' + POSTGRES_DB: 'test_{{ cookiecutter.project_slug }}' + +flake8: + stage: lint + image: python:3.7-alpine + before_script: + - pip install -q flake8 + script: + - flake8 + +pytest: + stage: test + image: python:3.7 + tags: + - docker + services: + - postgres:11 + variables: + DATABASE_URL: pgsql://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres/$POSTGRES_DB + + before_script: + - pip install -r requirements/local.txt + + script: + - pytest + diff --git a/{{cookiecutter.project_slug}}/.idea/workspace.xml 
b/{{cookiecutter.project_slug}}/.idea/workspace.xml new file mode 100644 index 00000000..a5d73c18 --- /dev/null +++ b/{{cookiecutter.project_slug}}/.idea/workspace.xml @@ -0,0 +1,14 @@ + + + {%- if cookiecutter.use_celery == 'y' %} + + + {%- else %} + + + {%- endif %} + diff --git a/{{cookiecutter.project_slug}}/.idea/{{cookiecutter.project_slug}}.iml b/{{cookiecutter.project_slug}}/.idea/{{cookiecutter.project_slug}}.iml index 8ff6e388..d408765a 100644 --- a/{{cookiecutter.project_slug}}/.idea/{{cookiecutter.project_slug}}.iml +++ b/{{cookiecutter.project_slug}}/.idea/{{cookiecutter.project_slug}}.iml @@ -5,10 +5,10 @@ @@ -25,12 +25,12 @@ - - - -
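Editor's note: the volume clean-up described in the ``docker-postgres-auth-failed`` troubleshooting section added above boils down to a couple of commands. A minimal sketch, assuming the generated project uses the default ``local.yml`` compose file; the exact volume name depends on the directory docker-compose runs from, so copy it from the ``docker volume ls`` output rather than guessing::

    # Option 1: clear the project-related Docker cache in one go (containers, volumes, images)
    docker-compose -f local.yml down --volumes --rmi all

    # Option 2: remove only the postgres volume by hand
    docker volume ls | grep postgres                 # find the volume backing the postgres service
    docker volume rm <volume_name_from_ls_output>    # placeholder; substitute the real name

Either way, the next ``docker-compose -f local.yml up`` recreates the database with the credentials currently in ``.envs/.local/.postgres``, so the password mismatch goes away.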