mirror of
https://github.com/cookiecutter/cookiecutter-django.git
synced 2025-04-21 17:12:05 +03:00
Compare commits
1146 Commits
2024.01.16
...
master
Author | SHA1 | Date | |
---|---|---|---|
|
846af52e73 | ||
|
9a3a3c686a | ||
|
8316bdc708 | ||
|
71f6c8ac09 | ||
|
22fb3141a1 | ||
|
458a21ee5c | ||
|
06c0cd2bf8 | ||
|
10d80a2ff9 | ||
|
6455b39c07 | ||
|
bf9ac68fed | ||
|
bb2d056a15 | ||
|
7ba49c9dc5 | ||
|
b4590cf8d9 | ||
|
8f1b488069 | ||
|
9085f602fa | ||
|
1e19553ded | ||
|
285e86bf71 | ||
|
7d8dabb813 | ||
|
b4f95514aa | ||
|
97c363bf9e | ||
|
6ea1f3c718 | ||
|
a802ac491c | ||
|
f1760fb835 | ||
|
5c5ec3a2ba | ||
|
dd24cdb036 | ||
|
0c1a349bfa | ||
|
54c70edf39 | ||
|
c550acd874 | ||
|
3e95ca6d8f | ||
|
9910a2bb91 | ||
|
179e4846a5 | ||
|
23747ef0f4 | ||
|
2504a349af | ||
|
31f7901060 | ||
|
28aa085015 | ||
|
8459a36369 | ||
|
7fbcc652f1 | ||
|
c04a8e39f9 | ||
|
9c68c8f3db | ||
|
e1a7d93d09 | ||
|
6d5fdb7e04 | ||
|
0822b46c2f | ||
|
1e22b0b987 | ||
|
29c70dfd0d | ||
|
1696a9d277 | ||
|
100271e7e1 | ||
|
b37fa9c79f | ||
|
3f3bc529ae | ||
|
e5c315bbc7 | ||
|
05ce556006 | ||
|
e6fac6075d | ||
|
ffb6f616a7 | ||
|
aa5308e82a | ||
|
0849bb518d | ||
|
3ea0666650 | ||
|
9691dc10c8 | ||
|
51d1c5bb5f | ||
|
fb5fcdbcaf | ||
|
4443eb41c2 | ||
|
c85e2f57f5 | ||
|
50ddc282d7 | ||
|
dfc2fb134b | ||
|
e4ee6565ef | ||
|
16488dd0cf | ||
|
d506d38f2e | ||
|
b3389579c1 | ||
|
7c9f6a75af | ||
|
0b90ecf4bf | ||
|
79f572c010 | ||
|
2d5d84f61a | ||
|
4c8ab6b4ba | ||
|
ee40ae6119 | ||
|
8bc53f538c | ||
|
6e9d2de035 | ||
|
e6f16d424f | ||
|
e7cad8f1c6 | ||
|
e8d328b283 | ||
|
fedf1f34d6 | ||
|
90d55a3a48 | ||
|
c898ed0dc8 | ||
|
fcef872763 | ||
|
697e8e40ce | ||
|
70f5a984b4 | ||
|
ccbb1d3afc | ||
|
7e736503ee | ||
|
cc0bd38eff | ||
|
425520956d | ||
|
ec4e519795 | ||
|
907ee6a607 | ||
|
f30c3c036c | ||
|
1a81521ae8 | ||
|
0299f98693 | ||
|
c448ff5ae6 | ||
|
65b16c82c0 | ||
|
08d1a3ba61 | ||
|
d8f3ad04f4 | ||
|
39f882ffb0 | ||
|
9653f1df97 | ||
|
4978690ae1 | ||
|
0e35c6f3b1 | ||
|
1ec8ef58dd | ||
|
a76b8fe8c8 | ||
|
eb2daaa132 | ||
|
ecbcd763a8 | ||
|
7977437563 | ||
|
9544a2243b | ||
|
431a4bd10c | ||
|
a06ed7ae76 | ||
|
13acd682cc | ||
|
67b3a7b9f3 | ||
|
1962169f57 | ||
|
bcf96d432f | ||
|
d04d3802f3 | ||
|
978455aa94 | ||
|
08cf6f72e5 | ||
|
f79dbf9e17 | ||
|
36b7e7754d | ||
|
c37b46e5cf | ||
|
17e5ad3f17 | ||
|
bde74225a3 | ||
|
8a019b40cf | ||
|
6b8545a95d | ||
|
0be2d9c919 | ||
|
7c6dff3b70 | ||
|
e0da2963c3 | ||
|
bcd27ef206 | ||
|
7371becfcf | ||
|
633762d9d5 | ||
|
0299266df9 | ||
|
bf2a3c5d84 | ||
|
f95ef4f8dc | ||
|
b5b24db9d2 | ||
|
827d35708b | ||
|
70bd35d398 | ||
|
d7c995b873 | ||
|
bb6e4085c7 | ||
|
723a388f3d | ||
|
3e1e6cb41b | ||
|
a691219456 | ||
|
a1105d9010 | ||
|
7f4211ab16 | ||
|
1cad7df6cc | ||
|
2193a661c5 | ||
|
e69d77a0b9 | ||
|
54e0ccec0c | ||
|
185f18dfc0 | ||
|
ab475a5570 | ||
|
564ca18e56 | ||
|
fbf6c886cb | ||
|
2dc567d131 | ||
|
7a85931fb0 | ||
|
c89a45c45f | ||
|
1ba44e9e03 | ||
|
42febc9e84 | ||
|
843f268c4d | ||
|
a8847158c9 | ||
|
d78dff4969 | ||
|
cb4a277136 | ||
|
35dacd8718 | ||
|
01650ddc1b | ||
|
e7edde03ba | ||
|
33aa0c0639 | ||
|
6047eeff3d | ||
|
8fd6404901 | ||
|
5eba578860 | ||
|
0d6de8f23b | ||
|
9cd3d10776 | ||
|
1dbaeaa09d | ||
|
2d54690d34 | ||
|
d39bb8ae16 | ||
|
bb7bffec63 | ||
|
c5b60d27cf | ||
|
cee86b68b1 | ||
|
7324393b15 | ||
|
38639e251e | ||
|
b9eb7c8eca | ||
|
d50dd24e90 | ||
|
6e46acc466 | ||
|
570763e7ce | ||
|
e4b00c26d7 | ||
|
95c3772b3e | ||
|
3ef84f1a26 | ||
|
b0e12c9cea | ||
|
411e92cf9d | ||
|
0434aea33f | ||
|
df2bd97a24 | ||
|
f8d267a78c | ||
|
683f207d4a | ||
|
a623f7bbc6 | ||
|
5b4e769354 | ||
|
33f32b06d9 | ||
|
7e0f4c165d | ||
|
e3929b15e4 | ||
|
f9298f9b99 | ||
|
4dcc9e786f | ||
|
2b09e6bb41 | ||
|
fd9dd32d28 | ||
|
e9f4ba8ae5 | ||
|
250a28bca8 | ||
|
61ffcbf163 | ||
|
1787fe31d2 | ||
|
a7362a0838 | ||
|
fc68927672 | ||
|
df7fcce28f | ||
|
b1bc92ff2c | ||
|
92491bc4ad | ||
|
fb7b05afae | ||
|
6c20b26773 | ||
|
a40c4e8588 | ||
|
8245b0c147 | ||
|
c17fef651f | ||
|
35bed843b8 | ||
|
72cb463ace | ||
|
1b62f5b329 | ||
|
1702b79b8c | ||
|
bb38f34c70 | ||
|
736ff7659d | ||
|
9ce93909a3 | ||
|
277bb8f475 | ||
|
8ce5908229 | ||
|
c187975e1d | ||
|
402fd66165 | ||
|
2c835f0ed5 | ||
|
47bc51274d | ||
|
8c02bf8f93 | ||
|
f63daa7eb1 | ||
|
cedbcbd0bf | ||
|
da24b3aa3f | ||
|
a41bbee492 | ||
|
b25ab14385 | ||
|
7e4ba42e6e | ||
|
857bcb6895 | ||
|
eee987e1b4 | ||
|
d43c73aeb6 | ||
|
3eacd55bf7 | ||
|
d7fe509ce9 | ||
|
9f34c18ffe | ||
|
9465a18868 | ||
|
017c9ae6f8 | ||
|
4dd166267d | ||
|
7675287041 | ||
|
606db119cd | ||
|
f0b50fb735 | ||
|
054eebf510 | ||
|
6a526ac633 | ||
|
88ba893524 | ||
|
0de4a2f6eb | ||
|
c02a16d6bf | ||
|
07061b6e2f | ||
|
c05770a4dc | ||
|
575d4c3809 | ||
|
4617cd2212 | ||
|
b09ee5be8a | ||
|
be2fdb2984 | ||
|
aca580df00 | ||
|
cbead91fdc | ||
|
7fc33c2a23 | ||
|
d00cd42e6c | ||
|
dc8a470a8f | ||
|
b1de740dff | ||
|
6215f17382 | ||
|
f440144fde | ||
|
c97df5f755 | ||
|
28ec065561 | ||
|
3b490ffd2f | ||
|
c357d9ef33 | ||
|
0bb37a3f1f | ||
|
1b9f3e7a48 | ||
|
d3ffe31bd0 | ||
|
e03b0b0c7e | ||
|
08ba595f87 | ||
|
c5988b5c3c | ||
|
73f41e2425 | ||
|
555872e1c9 | ||
|
868db771f3 | ||
|
cf375e934b | ||
|
b932c9bf15 | ||
|
4e506b44f1 | ||
|
8d6c8ebed1 | ||
|
f8954e8a47 | ||
|
1e0c9ff2c1 | ||
|
06bfd2ebaa | ||
|
dd98112051 | ||
|
51ce542796 | ||
|
5136d3da99 | ||
|
f2a45a3ee7 | ||
|
33a7478559 | ||
|
8d5a4269fc | ||
|
6b8d4dc856 | ||
|
4c95392321 | ||
|
6b7c4aebda | ||
|
14ba82423e | ||
|
46795883b5 | ||
|
5f9acfdf47 | ||
|
9bef46c198 | ||
|
571f8787eb | ||
|
a062a146df | ||
|
b40566e401 | ||
|
63044a04cd | ||
|
ab7f01c4d1 | ||
|
272ef407d6 | ||
|
4377481f32 | ||
|
fb27881b33 | ||
|
d40abd321e | ||
|
f4afaf5ba3 | ||
|
231940644e | ||
|
e86fd33633 | ||
|
19294ef8a0 | ||
|
60cc312918 | ||
|
17a1bf7e38 | ||
|
428fac8776 | ||
|
0384b8f001 | ||
|
33a707dd9f | ||
|
fb6549dc4d | ||
|
109238f746 | ||
|
244d7ac9cd | ||
|
ceafff014e | ||
|
b4737acb6d | ||
|
fd05b1de21 | ||
|
edd8a8b315 | ||
|
2533f72d61 | ||
|
020639f86f | ||
|
a829571997 | ||
|
9806ba7a36 | ||
|
8bf9a8c278 | ||
|
02f6336d74 | ||
|
6f5f1b8576 | ||
|
1b83a472ec | ||
|
8c1fac1b2e | ||
|
abc199d5ef | ||
|
ca405049ad | ||
|
ae86e01779 | ||
|
dc4d102477 | ||
|
ff7874b316 | ||
|
67af42be0c | ||
|
16b2c16b52 | ||
|
deb813ca05 | ||
|
4709a30bd7 | ||
|
62f39cda5e | ||
|
8831a8bd11 | ||
|
057562bf26 | ||
|
04d6620733 | ||
|
ccb0d974ed | ||
|
b042b212b6 | ||
|
a0c8863a9c | ||
|
931e300b63 | ||
|
822c1c0327 | ||
|
98f4863a25 | ||
|
c1092f5194 | ||
|
abfc6b0ee2 | ||
|
3f2155ab6e | ||
|
92cf1daaf4 | ||
|
209b54a628 | ||
|
54247e783e | ||
|
6994914572 | ||
|
b96e118f86 | ||
|
5fead041df | ||
|
2eb0b22530 | ||
|
dcde07d0ca | ||
|
bee41248a4 | ||
|
4696302b22 | ||
|
ed184d460a | ||
|
1ebaddc204 | ||
|
f3d4a9fcc7 | ||
|
44dc46997a | ||
|
a0c599d978 | ||
|
0bd606922c | ||
|
8fb90ab02a | ||
|
b66960ed2d | ||
|
9ada7ab125 | ||
|
c3ec0b020e | ||
|
6a2effb67c | ||
|
214e5f5d34 | ||
|
30295fefd2 | ||
|
c2caf522b5 | ||
|
d95f88142c | ||
|
49ed4a0267 | ||
|
c115b2e6de | ||
|
2d06ddda95 | ||
|
3252aa7ae7 | ||
|
f590d14070 | ||
|
d879c0b2ba | ||
|
0d9fbfd729 | ||
|
f86ebbbf3a | ||
|
907bfa5f6b | ||
|
877d81d618 | ||
|
7d9f58bef5 | ||
|
1a893456c3 | ||
|
d826d2e6f0 | ||
|
7114a1e1ba | ||
|
1bbd6d5fe4 | ||
|
29bd84b397 | ||
|
52ce09e623 | ||
|
ba3b48dcd9 | ||
|
0cac725ca1 | ||
|
9a3e7ebf16 | ||
|
5c4abe5b78 | ||
|
0a291c1ec1 | ||
|
a0af461794 | ||
|
b0c45224f1 | ||
|
6c64a810b4 | ||
|
56fce2b0e3 | ||
|
37aa462a2d | ||
|
120ba0d07f | ||
|
732c8784fa | ||
|
2cbb3bd02d | ||
|
d703580c75 | ||
|
168aae17f2 | ||
|
5e582d0392 | ||
|
463d671ffc | ||
|
33c75ad251 | ||
|
6f858e904b | ||
|
9bbee5f84d | ||
|
12380e1e80 | ||
|
e0819933ad | ||
|
ec499fa0ac | ||
|
9379027267 | ||
|
f8ef6c6251 | ||
|
faba566d31 | ||
|
60adaa716d | ||
|
053543d8d2 | ||
|
866e6293ba | ||
|
748382c95e | ||
|
80680fe7ce | ||
|
5023e549e1 | ||
|
811b3010b9 | ||
|
9de28fc3a8 | ||
|
4e7a412b8b | ||
|
8329947a23 | ||
|
5e9d60687b | ||
|
bf67fe405d | ||
|
f3397536d4 | ||
|
4fc2dec1de | ||
|
fc6a38ea83 | ||
|
77813a90ab | ||
|
2b375243d0 | ||
|
eb67667215 | ||
|
d5ab2a1d13 | ||
|
a0d6949877 | ||
|
af9b0bcc40 | ||
|
ec4438a1fd | ||
|
37a20861db | ||
|
d0ddb001a0 | ||
|
c3345e1ece | ||
|
2396bfbca6 | ||
|
7f07b02a84 | ||
|
d1e3391532 | ||
|
f1b5b66347 | ||
|
63abaaadf8 | ||
|
aab05d33f1 | ||
|
1dc0244a60 | ||
|
a5ca04c66f | ||
|
c0b9054486 | ||
|
caae3ce205 | ||
|
6f15595e85 | ||
|
6471d600d5 | ||
|
8431c36225 | ||
|
ac990f6524 | ||
|
f3b3d1432f | ||
|
c82e825509 | ||
|
09314028a0 | ||
|
0b43f145b2 | ||
|
1f6b8b4791 | ||
|
f313362907 | ||
|
043ecd7bcb | ||
|
0c11f9c602 | ||
|
a882f40676 | ||
|
69be07f387 | ||
|
e567d058b3 | ||
|
d841c79d48 | ||
|
4865e056c5 | ||
|
40de2dea1d | ||
|
9f1037ab18 | ||
|
50a8c2b498 | ||
|
8f8f87df51 | ||
|
ab36a6112e | ||
|
3e253ccb1a | ||
|
2c90ff55a3 | ||
|
8559078fd4 | ||
|
1f7fa64df5 | ||
|
ef53145219 | ||
|
66ebc4ecaa | ||
|
a437e493b1 | ||
|
e956b831a5 | ||
|
ab84c2181a | ||
|
1a2bcc5c47 | ||
|
d6f18f04a9 | ||
|
ea4cd48e6c | ||
|
e2a47bf83f | ||
|
9b289081af | ||
|
2d0a8fb4f7 | ||
|
1a141ef29b | ||
|
7ab86a7c0e | ||
|
c44f6f6372 | ||
|
d4030f123a | ||
|
473ae8c059 | ||
|
fec82ed7f7 | ||
|
16ff35eecc | ||
|
4f9c697e59 | ||
|
e782791abc | ||
|
27c654c637 | ||
|
b7fc1973ad | ||
|
ede7c843a6 | ||
|
b30af679f4 | ||
|
aa1f8a803b | ||
|
c9e7693559 | ||
|
e443763e35 | ||
|
f3c0b39928 | ||
|
5a53905f8f | ||
|
b1bfcfe182 | ||
|
83fbf9a770 | ||
|
5b5df8c0b6 | ||
|
6b7ca732dc | ||
|
61ce28ba81 | ||
|
d564f453a6 | ||
|
9fcb4fc27e | ||
|
6f3b72d691 | ||
|
2ff8a5f0d2 | ||
|
e90704fdae | ||
|
44ef825101 | ||
|
31bfe6de12 | ||
|
2a6d2cc23c | ||
|
6e1bab9df0 | ||
|
f947c612fa | ||
|
1bbe30c9b9 | ||
|
e869ad8eaf | ||
|
4bbc2a7fe6 | ||
|
9b0ce11d1d | ||
|
b974fa0ee8 | ||
|
69927387f8 | ||
|
f28b74f2df | ||
|
a906529b32 | ||
|
9321a9ba43 | ||
|
ce92e1e944 | ||
|
165b1ef6b5 | ||
|
2e779508e6 | ||
|
6100821a35 | ||
|
d8a2d2fa9d | ||
|
ab4ed1a135 | ||
|
8af7634e94 | ||
|
0836abca26 | ||
|
f8ca5c2aaf | ||
|
5900b39179 | ||
|
d8051c9db2 | ||
|
47e63900cf | ||
|
864581a13e | ||
|
dc0a511353 | ||
|
a97d2b8b66 | ||
|
c812519624 | ||
|
623007f769 | ||
|
411225bf67 | ||
|
1fc66d0ecf | ||
|
89a1efc2a3 | ||
|
5ba04c6c78 | ||
|
b1089c254c | ||
|
cf1ce9e45f | ||
|
851155656d | ||
|
734faf13ea | ||
|
07af107e1e | ||
|
4b683d86d2 | ||
|
774176f9be | ||
|
89d1ae1536 | ||
|
4e3768145b | ||
|
532941a418 | ||
|
75464de512 | ||
|
1c23f83fc4 | ||
|
107d0125af | ||
|
d0fa1e473d | ||
|
54dca549d3 | ||
|
ced3148af6 | ||
|
f2f1ef6cd4 | ||
|
68814e94e2 | ||
|
c8165ef92c | ||
|
358b26a0d6 | ||
|
e1354239d8 | ||
|
5846b051cb | ||
|
936fc52602 | ||
|
af50d11191 | ||
|
3cbe3b2a90 | ||
|
0576726cab | ||
|
7390948fc5 | ||
|
875d9200cf | ||
|
e6916a858a | ||
|
775c7730f6 | ||
|
74c5794a72 | ||
|
a7f34df264 | ||
|
f56562db79 | ||
|
739ba6206b | ||
|
a2b1055e16 | ||
|
75a9da3de2 | ||
|
20db5f1315 | ||
|
bb053b0497 | ||
|
0d9ffe5d7a | ||
|
3cca3c0bbe | ||
|
5a61a0c828 | ||
|
4413b25706 | ||
|
4c27a59678 | ||
|
db496f9c0c | ||
|
8f4c31cf8c | ||
|
57d8c200e4 | ||
|
676aa25a37 | ||
|
429f468100 | ||
|
bb8a1b6637 | ||
|
6355464230 | ||
|
a8105bdc9f | ||
|
dd15c83977 | ||
|
9b42473bad | ||
|
4c24e7fb18 | ||
|
207a9e576c | ||
|
1af0757340 | ||
|
2b1c6e9f50 | ||
|
55cf48c4f3 | ||
|
5caa9c0160 | ||
|
a6fcb62e19 | ||
|
4540098afc | ||
|
69c3b4c0cb | ||
|
b0e6b5ef76 | ||
|
12e9dc753c | ||
|
bee2e414b7 | ||
|
12c5eacb0c | ||
|
3858e90528 | ||
|
34803624eb | ||
|
41b50bcb7d | ||
|
a6fc34eeea | ||
|
75123bd4ec | ||
|
22e2715c82 | ||
|
e6bd8d1a2a | ||
|
e4a2a46c68 | ||
|
b5d7460ae1 | ||
|
a5f0967eec | ||
|
4d74cd9730 | ||
|
1b88b699c9 | ||
|
88da1f4413 | ||
|
ab081ee471 | ||
|
cb103ce28d | ||
|
e68d935987 | ||
|
e16eed7f1b | ||
|
7082e66db3 | ||
|
0834ba6870 | ||
|
dcaf6741de | ||
|
bd82fe72f8 | ||
|
f669e101a0 | ||
|
634b091756 | ||
|
2443932974 | ||
|
e260a5da7e | ||
|
32eb1b2e8c | ||
|
057fb75a45 | ||
|
bf76313bdf | ||
|
5ba14a2d2c | ||
|
722e2bc4d5 | ||
|
a319c8284e | ||
|
c662c4d615 | ||
|
4e39cd2d51 | ||
|
00bac8eb68 | ||
|
d389302b6d | ||
|
ae03ac4702 | ||
|
45c4429154 | ||
|
cc67304d9c | ||
|
6754a2ec04 | ||
|
ef433d1a17 | ||
|
3156e2a560 | ||
|
78593239ee | ||
|
e6336a29f0 | ||
|
7cbc7e920e | ||
|
d5198dc40b | ||
|
0dd5c6ee38 | ||
|
2fbceaf6c3 | ||
|
96a5a2d1dd | ||
|
9631c0a817 | ||
|
889d8e0732 | ||
|
8cf31c9317 | ||
|
bac56c4a7f | ||
|
3463d8ad17 | ||
|
e716be4d36 | ||
|
c3e1983ef1 | ||
|
abd00d18ae | ||
|
a0c6867722 | ||
|
888d48728e | ||
|
fef5476742 | ||
|
20bae48753 | ||
|
b65afbce8c | ||
|
ae64eef2ee | ||
|
de2239ab2f | ||
|
29027487fa | ||
|
edbdefb0a7 | ||
|
2d18e673a3 | ||
|
0e7ca03345 | ||
|
899886fa2a | ||
|
b200d44885 | ||
|
0f7aed4f27 | ||
|
9757b1739e | ||
|
c7a4b9bb45 | ||
|
1f82072d5b | ||
|
8e319a21db | ||
|
bcfbb7b712 | ||
|
315724b797 | ||
|
b6aa5b1535 | ||
|
29705080b9 | ||
|
bacd17bb08 | ||
|
3257b3d270 | ||
|
a4f2206741 | ||
|
85c6014597 | ||
|
431224d6e4 | ||
|
9f64012987 | ||
|
e71dd7c1fd | ||
|
ef34d668da | ||
|
f9b30c7bc8 | ||
|
daa858225f | ||
|
875022864a | ||
|
e03d9a6854 | ||
|
cc41c0b804 | ||
|
1e260740c2 | ||
|
0b95d16aaa | ||
|
16652acaf1 | ||
|
d40623b382 | ||
|
a2e364b5c1 | ||
|
401ffd0e86 | ||
|
c3335e6604 | ||
|
bcd4437491 | ||
|
f9a4d864db | ||
|
c4f7b09cf3 | ||
|
f6b9fa3708 | ||
|
987bee4573 | ||
|
80674b017e | ||
|
03e68e893f | ||
|
c11d5519d6 | ||
|
ed0857f7b3 | ||
|
453ea25b1d | ||
|
bde8236e9a | ||
|
07c125d940 | ||
|
4b94cde887 | ||
|
a0ae1945b5 | ||
|
c1ca194e4f | ||
|
d3ce7925b2 | ||
|
8969cf60ab | ||
|
cbe4532af5 | ||
|
f6bf52173e | ||
|
eec17f7c57 | ||
|
ad632484fd | ||
|
b6656379af | ||
|
db38d8b00f | ||
|
33c36f17cd | ||
|
690b1ad327 | ||
|
1e1255c966 | ||
|
e3adc3561f | ||
|
f2880ab5d5 | ||
|
66e4d3d99a | ||
|
75a8dae0dc | ||
|
7216565cfa | ||
|
824630a9e9 | ||
|
ed59d08ec3 | ||
|
53394a6ac7 | ||
|
3f38cfac5a | ||
|
0cfd278da2 | ||
|
fc5182591f | ||
|
34644d50ba | ||
|
2d5712baf0 | ||
|
3dd83f47ff | ||
|
0216ebb268 | ||
|
07e6282244 | ||
|
2aae512ef6 | ||
|
30a92f55af | ||
|
310b803f29 | ||
|
efc3882980 | ||
|
28e4ded048 | ||
|
779d51e60a | ||
|
4849aeb7f8 | ||
|
3965c385b0 | ||
|
a0c2e1f31f | ||
|
4bdc239e03 | ||
|
6ed9e588b2 | ||
|
ba97c2a719 | ||
|
c319d929d1 | ||
|
bdaa17ca07 | ||
|
b669566bc5 | ||
|
c3c099095b | ||
|
aeb151d348 | ||
|
b8b470c796 | ||
|
3230a1c371 | ||
|
084434c9db | ||
|
cc37044a8f | ||
|
53ac394a6f | ||
|
4e48f9e1ec | ||
|
6db133df42 | ||
|
9e721a8a02 | ||
|
b119eae9b4 | ||
|
b9f6d8875e | ||
|
5a51ef2247 | ||
|
fdf0254392 | ||
|
2f9051fbbd | ||
|
f29c1c67e3 | ||
|
e5fef81958 | ||
|
0bf7e51c1b | ||
|
9eec5fc227 | ||
|
1732734ce5 | ||
|
beba4c177d | ||
|
e9f029decf | ||
|
4710101acd | ||
|
88a9a15dbf | ||
|
9fd3154127 | ||
|
34f8a2e154 | ||
|
bd5d75a246 | ||
|
f82807f53a | ||
|
bebffe7cc9 | ||
|
9322f16b1a | ||
|
23c1fa6e35 | ||
|
10c85ce3d6 | ||
|
b60d26f6bd | ||
|
6f68602bbc | ||
|
b3f1f8ef93 | ||
|
b27efd4697 | ||
|
3166ed72f1 | ||
|
fcc565b85e | ||
|
ba7eb5a5d4 | ||
|
8b41dde2f0 | ||
|
9557d9ba0d | ||
|
a45bb9eb2c | ||
|
2c55a3d443 | ||
|
f58530288c | ||
|
9b2947a353 | ||
|
4fbd009965 | ||
|
26abd3a7a8 | ||
|
ebf082b696 | ||
|
206e6b018f | ||
|
340795cf4e | ||
|
a2129172d9 | ||
|
1b6a8e5ad0 | ||
|
8e9bd2273e | ||
|
7fa5d86020 | ||
|
3608c21fe7 | ||
|
38318ab7f5 | ||
|
4c46f4b97e | ||
|
f0d2f40a2e | ||
|
f66396d812 | ||
|
e9d6b24c89 | ||
|
5411df989f | ||
|
9cbd8c5f77 | ||
|
edf043a820 | ||
|
a65b0d4720 | ||
|
e176d92a8b | ||
|
5d0e593209 | ||
|
f9cb423b00 | ||
|
9d1316c7d0 | ||
|
0d44ffee84 | ||
|
c32ddea18f | ||
|
b10a1e7df5 | ||
|
108ae4e83d | ||
|
4937e29adf | ||
|
8a9b4eb58a | ||
|
090153079e | ||
|
00ecfc5278 | ||
|
3f5f90e002 | ||
|
ae52c62692 | ||
|
d4bad0c306 | ||
|
89acf137f5 | ||
|
d5d41fc756 | ||
|
1b42f65b03 | ||
|
1111c1afa5 | ||
|
b15cae614a | ||
|
929992e6d8 | ||
|
af9cfc5002 | ||
|
9006d69055 | ||
|
83d3e6f83d | ||
|
c9d64fad62 | ||
|
44c94a2cf6 | ||
|
e686ab1271 | ||
|
ee6de7951f | ||
|
fd3acfc11f | ||
|
0e71fa21b6 | ||
|
e8e89f66e9 | ||
|
2e9decda8d | ||
|
352e5dae21 | ||
|
5f78a8f190 | ||
|
861ad0d1fc | ||
|
cf7dfed348 | ||
|
7fde9bf188 | ||
|
ea68dd5c5d | ||
|
27b8450bc0 | ||
|
9bd0eff3a7 | ||
|
173af52ebb | ||
|
f79c89d928 | ||
|
db60766526 | ||
|
79ff7ca0bb | ||
|
c6b972d712 | ||
|
8d2b6a265b | ||
|
5395aeff52 | ||
|
b96dac09a9 | ||
|
94269f8c7f | ||
|
7d032d7303 | ||
|
8a2eb3142f | ||
|
8492ebe631 | ||
|
df0d91c873 | ||
|
d8ade3be0d | ||
|
f6081a32f7 | ||
|
0e63336ddb | ||
|
5916cf0d30 | ||
|
681c13e0fa | ||
|
eae5c9c1cd | ||
|
b99ad39862 | ||
|
8060df4c51 | ||
|
83ceb39b50 | ||
|
e2b2d36b4f | ||
|
aa0e49ae4b | ||
|
831ce14ca3 | ||
|
1661e55375 | ||
|
559fae8a70 | ||
|
39af435201 | ||
|
27734891ab | ||
|
ab69189482 | ||
|
9956f2b2e6 | ||
|
d430e2f3fc | ||
|
4abd083c64 | ||
|
ccdb6a4ba0 | ||
|
668ee336c9 | ||
|
7e41971a76 | ||
|
53e360c8db | ||
|
101bfdb259 | ||
|
4b9873a4f1 | ||
|
10f6402d94 | ||
|
38406ca107 | ||
|
0c5b738922 | ||
|
525ebf2b45 | ||
|
8899824460 | ||
|
1c9c91b15e | ||
|
308139717d | ||
|
79b2828f7b | ||
|
a43cbf0709 | ||
|
a3f3268c95 | ||
|
349679ef33 | ||
|
5af055bccb | ||
|
719fa5d57b | ||
|
35f21ba697 | ||
|
b9e88f94a7 | ||
|
bca7d02f0a | ||
|
099d0f2391 | ||
|
a8ff250a9d | ||
|
4166119930 | ||
|
f07848448a | ||
|
ba4d7f6435 | ||
|
bd9181befa | ||
|
52b4a61d82 | ||
|
bf09d5614f | ||
|
c616a03632 | ||
|
f4362c7676 | ||
|
1a7e6a8414 | ||
|
ad6faa617d | ||
|
574163670d | ||
|
883d60fced | ||
|
387f03e673 | ||
|
2e8412521a | ||
|
747a1b1502 | ||
|
3b3cf416f3 | ||
|
bb2c1c9952 | ||
|
88a90402a4 | ||
|
a1d34a08af | ||
|
b488056b63 | ||
|
5c71441c9b | ||
|
b5839e28af | ||
|
523b61ae7c | ||
|
6fd8493831 | ||
|
45266e81f4 | ||
|
ba4f3299ba | ||
|
0ba5fc708d | ||
|
20d0e2bfe7 | ||
|
7b1d5f1598 | ||
|
c76c14ace5 | ||
|
a406fde1af | ||
|
93024ca941 | ||
|
2e9a9b0288 | ||
|
c1d580af48 | ||
|
a1541891ed | ||
|
6c5ac37ed0 | ||
|
cd9b5c0201 | ||
|
c561f51660 | ||
|
b27fd26b78 | ||
|
cc55c8c671 | ||
|
374915c6a1 | ||
|
64cb0073f8 | ||
|
9e8931e25c | ||
|
a11547f27e | ||
|
f39aff8b3f | ||
|
f1da6ba5f5 | ||
|
0b4e92739c | ||
|
2aff1bdb75 | ||
|
3d4dcee356 | ||
|
6d6f037b07 | ||
|
22d927ed17 | ||
|
fa44078ca6 | ||
|
074ebce0c7 | ||
|
54185228e3 | ||
|
f15aa53f10 | ||
|
4887b92148 | ||
|
48fadda99a | ||
|
46c0c51c52 | ||
|
b0cfbc35b1 | ||
|
e72da846f2 | ||
|
96680378f9 | ||
|
93e5e16fc2 | ||
|
ebb6c8b225 | ||
|
95e5598dfd | ||
|
f617433bac | ||
|
81a1fc8a38 | ||
|
f86df89db9 | ||
|
1174889779 | ||
|
6dcf48fd0a | ||
|
57158338ce | ||
|
daacdd078b | ||
|
5f2c9e6e40 | ||
|
c7872e6c0f | ||
|
56f630bae6 | ||
|
c3708c0809 | ||
|
0e41e5e8d0 | ||
|
2c93ef4009 | ||
|
052330272a | ||
|
8f2b894176 | ||
|
ddf1852768 | ||
|
91ebf6f95b | ||
|
899a1915cd | ||
|
37f974157b | ||
|
0ca9be321a | ||
|
4db3ea1e58 | ||
|
916f666637 | ||
|
357604f37b | ||
|
a76656aba1 | ||
|
49a66b3583 | ||
|
ee55aa29b4 | ||
|
a247d8f8a2 | ||
|
70cde064d4 | ||
|
c6bd47a1bf | ||
|
8031a2a51a | ||
|
4391f1da5c | ||
|
b756d904d7 | ||
|
8c60674654 | ||
|
adebac5b88 | ||
|
2ac513e0c1 | ||
|
7db193d9fa | ||
|
dea7316a62 | ||
|
cb0e06dcbd | ||
|
f77233d7c0 | ||
|
80147bd340 | ||
|
6237573637 | ||
|
fd959eedd9 | ||
|
bf9a861ddc | ||
|
de54e4fba3 | ||
|
27a7a4085d | ||
|
5bc8ac664c | ||
|
9a5b9c2f53 | ||
|
0fc4ea6165 | ||
|
6ba6104f09 | ||
|
dd841c6478 | ||
|
07376d8a7f | ||
|
2e129bd65b | ||
|
f12ac669f3 | ||
|
991f20e44f | ||
|
576bb1d452 | ||
|
6ac86a5b57 | ||
|
1086d19af9 | ||
|
e95c2733dd | ||
|
1283102629 | ||
|
9b0bc1cc5c | ||
|
474b82b5d3 | ||
|
1e09d20ffc | ||
|
519d07144c | ||
|
d4ff2e4adf | ||
|
44ca412dc1 | ||
|
6b90a9e701 | ||
|
bb6d176235 | ||
|
d1cc4dc801 | ||
|
f9b4ce315f | ||
|
908697e5a3 | ||
|
f2c320527a | ||
|
300ccc6b57 | ||
|
d5967ed3e9 | ||
|
2f93474a0c | ||
|
50b7ebf9da | ||
|
df730044b2 | ||
|
10376eb172 | ||
|
3cd8d87592 | ||
|
f00d698952 | ||
|
52e7b05209 | ||
|
27356353d8 | ||
|
676234b3a4 | ||
|
385ef51464 | ||
|
5088786856 | ||
|
b55500d772 | ||
|
545d380a55 | ||
|
718a414e2e | ||
|
a4741c5a8f | ||
|
f69abd1352 | ||
|
2225fae2b0 | ||
|
a4d7a31d89 | ||
|
c2d236fc7f | ||
|
0cb95819a9 | ||
|
0d10182a93 | ||
|
f6db519de0 | ||
|
71011c2897 | ||
|
6635886302 | ||
|
f4086f83a7 | ||
|
102a94f1aa | ||
|
9ea2365d2f | ||
|
b556cef533 | ||
|
f3cf85500c | ||
|
7d75873f09 | ||
|
299a2dc550 | ||
|
5d8a538f25 | ||
|
db3b1d5117 | ||
|
5d9efa648b | ||
|
a11f02de8d | ||
|
cce9c0ee00 | ||
|
5083f866d7 | ||
|
9227319998 | ||
|
96756036f8 | ||
|
4f02e397d5 | ||
|
e87b6fad33 | ||
|
d45c042cd7 | ||
|
2ef8d0a78e | ||
|
a5cf715966 | ||
|
c7c16c746c | ||
|
e840b75eba | ||
|
544e1e1038 | ||
|
05c0d3ee52 | ||
|
b68913ccef | ||
|
e4d6b98c93 | ||
|
8324c160e8 | ||
|
6301fcc603 | ||
|
cf788ffe61 | ||
|
991a1097dd | ||
|
c7eded8644 | ||
|
956469849a | ||
|
bda89eaa06 | ||
|
4da8386f9c | ||
|
cd0a76c46f | ||
|
1899b485e6 | ||
|
e354622bb7 | ||
|
a634c7aed9 | ||
|
d9033c1ee2 | ||
|
86faba37d9 | ||
|
572ed96d35 | ||
|
238d54a8a8 | ||
|
5aa3c60537 | ||
|
3f57c5525a | ||
|
1ee1de2df7 | ||
|
d990dcacab | ||
|
350e74322d | ||
|
bc06b0da59 | ||
|
5665b617fc |
|
@ -12,7 +12,7 @@ trim_trailing_whitespace = true
|
|||
indent_style = space
|
||||
indent_size = 4
|
||||
|
||||
[*.{html,css,scss,json,yml,xml}]
|
||||
[*.{html,css,scss,json,yml,xml,toml}]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
|
|
2
.github/FUNDING.yml
vendored
2
.github/FUNDING.yml
vendored
|
@ -1,5 +1,5 @@
|
|||
# These are supported funding model platforms
|
||||
|
||||
github: [pydanny, browniebroke]
|
||||
github: [pydanny, browniebroke, luzfcb]
|
||||
patreon: feldroy
|
||||
open_collective: cookiecutter-django
|
||||
|
|
207
.github/contributors.json
vendored
207
.github/contributors.json
vendored
|
@ -1115,7 +1115,7 @@
|
|||
"twitter_username": "Qoyyuum"
|
||||
},
|
||||
{
|
||||
"name": "mfosterw",
|
||||
"name": "Matthew Foster Walsh",
|
||||
"github_login": "mfosterw",
|
||||
"twitter_username": ""
|
||||
},
|
||||
|
@ -1508,5 +1508,210 @@
|
|||
"name": "Nix Siow",
|
||||
"github_login": "nixsiow",
|
||||
"twitter_username": "nixsiow"
|
||||
},
|
||||
{
|
||||
"name": "Jens Kaeske",
|
||||
"github_login": "jkaeske",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "henningbra",
|
||||
"github_login": "henningbra",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Paul Wulff",
|
||||
"github_login": "mtmpaulwulff",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Mounir",
|
||||
"github_login": "mounirmesselmeni",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "JAEGYUN JUNG",
|
||||
"github_login": "TGoddessana",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Simeon Emanuilov",
|
||||
"github_login": "s-emanuilov",
|
||||
"twitter_username": "s_emanuilov"
|
||||
},
|
||||
{
|
||||
"name": "Patrick Zhang",
|
||||
"github_login": "PatDuJour",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "GvS",
|
||||
"github_login": "GvS666",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "David Păcioianu",
|
||||
"github_login": "DavidPacioianu",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "farwill",
|
||||
"github_login": "farwill",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "quroom",
|
||||
"github_login": "quroom",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Marios Frixou",
|
||||
"github_login": "frixou89",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Geo Maciolek",
|
||||
"github_login": "GeoMaciolek",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Nadav Peretz",
|
||||
"github_login": "nadavperetz",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Param Kapur",
|
||||
"github_login": "paramkpr",
|
||||
"twitter_username": "ParamKapur"
|
||||
},
|
||||
{
|
||||
"name": "Jason Mok",
|
||||
"github_login": "jasonmokk",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Manas Mallick",
|
||||
"github_login": "ManDun",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Alexandr Artemyev",
|
||||
"github_login": "Mogost",
|
||||
"twitter_username": "MOGOST"
|
||||
},
|
||||
{
|
||||
"name": "Ali Shamakhi",
|
||||
"github_login": "ali-shamakhi",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Filipe Nascimento",
|
||||
"github_login": "FilipeNas",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Kevin Mills",
|
||||
"github_login": "millsks",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "milvagox",
|
||||
"github_login": "milvagox",
|
||||
"twitter_username": "milvagox"
|
||||
},
|
||||
{
|
||||
"name": "Johnny Metz",
|
||||
"github_login": "johnnymetz",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Will",
|
||||
"github_login": "novucs",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "rxm7706",
|
||||
"github_login": "rxm7706",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Marlon Castillo",
|
||||
"github_login": "mcastle",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Alex Kanavos",
|
||||
"github_login": "alexkanavos",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "LJFP",
|
||||
"github_login": "ljfp",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Francisco Navarro Morales ",
|
||||
"github_login": "spothound",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Mariot Tsitoara",
|
||||
"github_login": "mariot",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Christian Jensen",
|
||||
"github_login": "jensenbox",
|
||||
"twitter_username": "cjensen"
|
||||
},
|
||||
{
|
||||
"name": "Denis Darii",
|
||||
"github_login": "DNX",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "qwerrrqw",
|
||||
"github_login": "qwerrrqw",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Pulse-Mind",
|
||||
"github_login": "pulse-mind",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Hana Belay",
|
||||
"github_login": "earthcomfy",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Ed Morley",
|
||||
"github_login": "edmorley",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Alan Cyment",
|
||||
"github_login": "acyment",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Kawsar Alam Foysal",
|
||||
"github_login": "iamfoysal",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Igor Jerosimić",
|
||||
"github_login": "igor-wl",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Pepa",
|
||||
"github_login": "07pepa",
|
||||
"twitter_username": ""
|
||||
},
|
||||
{
|
||||
"name": "Aidos Kanapyanov",
|
||||
"github_login": "aidoskanapyanov",
|
||||
"twitter_username": ""
|
||||
}
|
||||
]
|
75
.github/dependabot.yml
vendored
75
.github/dependabot.yml
vendored
|
@ -1,8 +1,17 @@
|
|||
# Config for Dependabot updates. See Documentation here:
|
||||
# https://docs.github.com/code-security/supply-chain-security/keeping-your-dependencies-updated-automatically/configuration-options-for-dependency-updates
|
||||
# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
|
||||
|
||||
version: 2
|
||||
enable-beta-ecosystems: true
|
||||
updates:
|
||||
# Update Python deps for the template (not the generated project)
|
||||
- package-ecosystem: "uv"
|
||||
directory: "/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
labels:
|
||||
- "project infrastructure"
|
||||
|
||||
# Update GitHub actions in workflows
|
||||
- package-ecosystem: "github-actions"
|
||||
directory: "/"
|
||||
|
@ -20,11 +29,11 @@ updates:
|
|||
- "update"
|
||||
|
||||
# Enable version updates for Docker
|
||||
# We need to specify each Dockerfile in a separate entry because Dependabot doesn't
|
||||
# support wildcards or recursively checking subdirectories. Check this issue for updates:
|
||||
# https://github.com/dependabot/dependabot-core/issues/2178
|
||||
- package-ecosystem: "docker"
|
||||
directory: "{{cookiecutter.project_slug}}/compose/local/django/"
|
||||
directories:
|
||||
- "{{cookiecutter.project_slug}}/compose/local/django/"
|
||||
- "{{cookiecutter.project_slug}}/compose/local/docs/"
|
||||
- "{{cookiecutter.project_slug}}/compose/production/django/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
ignore:
|
||||
|
@ -34,54 +43,18 @@ updates:
|
|||
- "version-update:semver-minor"
|
||||
labels:
|
||||
- "update"
|
||||
groups:
|
||||
docker-python:
|
||||
patterns:
|
||||
- "*"
|
||||
|
||||
- package-ecosystem: "docker"
|
||||
directory: "{{cookiecutter.project_slug}}/compose/local/docs/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
ignore:
|
||||
- dependency-name: "*"
|
||||
update-types:
|
||||
- "version-update:semver-major"
|
||||
- "version-update:semver-minor"
|
||||
labels:
|
||||
- "update"
|
||||
|
||||
- package-ecosystem: "docker"
|
||||
directory: "{{cookiecutter.project_slug}}/compose/local/node/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
labels:
|
||||
- "update"
|
||||
|
||||
- package-ecosystem: "docker"
|
||||
directory: "{{cookiecutter.project_slug}}/compose/production/aws/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
labels:
|
||||
- "update"
|
||||
|
||||
- package-ecosystem: "docker"
|
||||
directory: "{{cookiecutter.project_slug}}/compose/production/django/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
ignore:
|
||||
- dependency-name: "*"
|
||||
update-types:
|
||||
- "version-update:semver-major"
|
||||
- "version-update:semver-minor"
|
||||
labels:
|
||||
- "update"
|
||||
|
||||
- package-ecosystem: "docker"
|
||||
directory: "{{cookiecutter.project_slug}}/compose/production/postgres/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
labels:
|
||||
- "update"
|
||||
|
||||
- package-ecosystem: "docker"
|
||||
directory: "{{cookiecutter.project_slug}}/compose/production/traefik/"
|
||||
directories:
|
||||
- "{{cookiecutter.project_slug}}/compose/local/node/"
|
||||
- "{{cookiecutter.project_slug}}/compose/production/aws/"
|
||||
- "{{cookiecutter.project_slug}}/compose/production/postgres/"
|
||||
- "{{cookiecutter.project_slug}}/compose/production/nginx/"
|
||||
- "{{cookiecutter.project_slug}}/compose/production/traefik/"
|
||||
schedule:
|
||||
interval: "daily"
|
||||
labels:
|
||||
|
|
52
.github/workflows/align-versions.yml
vendored
Normal file
52
.github/workflows/align-versions.yml
vendored
Normal file
|
@ -0,0 +1,52 @@
|
|||
name: align versions
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "{{cookiecutter.project_slug}}/requirements/local.txt"
|
||||
- "{{cookiecutter.project_slug}}/compose/local/node/Dockerfile"
|
||||
# Manual trigger
|
||||
workflow_dispatch:
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
run:
|
||||
if: ${{ github.actor == 'pyup-bot' }}
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GH_PAT: ${{ secrets.GH_PAT }}
|
||||
|
||||
strategy:
|
||||
fail-fast: false
|
||||
matrix:
|
||||
job:
|
||||
- script: scripts/ruff_version.py
|
||||
name: Ruff
|
||||
- script: scripts/node_version.py
|
||||
name: Node
|
||||
|
||||
name: "${{ matrix.job.name }} versions"
|
||||
steps:
|
||||
- name: Checkout with token
|
||||
uses: actions/checkout@v4
|
||||
if: ${{ env.GH_PAT != '' }}
|
||||
with:
|
||||
token: ${{ env.GH_PAT }}
|
||||
ref: ${{ github.head_ref }}
|
||||
|
||||
- name: Checkout without token
|
||||
uses: actions/checkout@v4
|
||||
if: ${{ env.GH_PAT == '' }}
|
||||
with:
|
||||
ref: ${{ github.head_ref }}
|
||||
|
||||
- uses: astral-sh/setup-uv@v5
|
||||
|
||||
- run: uv run ${{ matrix.job.script }}
|
||||
|
||||
- uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: Align versions
|
35
.github/workflows/ci.yml
vendored
35
.github/workflows/ci.yml
vendored
|
@ -23,14 +23,12 @@ jobs:
|
|||
runs-on: ${{ matrix.os }}
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.11"
|
||||
cache: pip
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- name: Install dependencies
|
||||
run: pip install -r requirements.txt
|
||||
run: uv sync
|
||||
- name: Run tests
|
||||
run: pytest -n auto tests
|
||||
run: uv run pytest -n auto tests
|
||||
|
||||
docker:
|
||||
strategy:
|
||||
|
@ -54,12 +52,10 @@ jobs:
|
|||
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.11"
|
||||
cache: pip
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- name: Install dependencies
|
||||
run: pip install -r requirements.txt
|
||||
run: uv sync
|
||||
- name: Docker ${{ matrix.script.name }}
|
||||
run: sh tests/test_docker.sh ${{ matrix.script.args }}
|
||||
|
||||
|
@ -85,14 +81,14 @@ jobs:
|
|||
ports:
|
||||
- 6379:6379
|
||||
postgres:
|
||||
image: postgres:12
|
||||
image: postgres:13
|
||||
ports:
|
||||
- 5432:5432
|
||||
env:
|
||||
POSTGRES_PASSWORD: postgres
|
||||
|
||||
env:
|
||||
CELERY_BROKER_URL: "redis://localhost:6379/0"
|
||||
REDIS_URL: "redis://localhost:6379/0"
|
||||
# postgres://user:password@host:port/database
|
||||
DATABASE_URL: "postgres://postgres:postgres@localhost:5432/postgres"
|
||||
|
||||
|
@ -100,16 +96,13 @@ jobs:
|
|||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.11"
|
||||
cache: pip
|
||||
cache-dependency-path: |
|
||||
requirements.txt
|
||||
{{cookiecutter.project_slug}}/requirements/base.txt
|
||||
{{cookiecutter.project_slug}}/requirements/local.txt
|
||||
python-version: "3.12"
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- name: Install dependencies
|
||||
run: pip install -r requirements.txt
|
||||
run: uv sync
|
||||
- uses: actions/setup-node@v4
|
||||
with:
|
||||
node-version: "20"
|
||||
node-version: "22.14"
|
||||
- name: Bare Metal ${{ matrix.script.name }}
|
||||
run: sh tests/test_bare.sh ${{ matrix.script.args }}
|
||||
|
|
33
.github/workflows/dependabot-uv-lock.yml
vendored
Normal file
33
.github/workflows/dependabot-uv-lock.yml
vendored
Normal file
|
@ -0,0 +1,33 @@
|
|||
name: uv
|
||||
|
||||
on:
|
||||
pull_request:
|
||||
paths:
|
||||
- "pyproject.toml"
|
||||
|
||||
permissions:
|
||||
contents: write
|
||||
pull-requests: write
|
||||
|
||||
jobs:
|
||||
lock:
|
||||
if: ${{ github.actor == 'dependabot[bot]' }}
|
||||
runs-on: ubuntu-latest
|
||||
env:
|
||||
GH_PAT: ${{ secrets.GH_PAT }}
|
||||
steps:
|
||||
- name: Checkout with token
|
||||
uses: actions/checkout@v4
|
||||
if: ${{ env.GH_PAT != '' }}
|
||||
with:
|
||||
token: ${{ env.GH_PAT }}
|
||||
|
||||
- name: Checkout without token
|
||||
uses: actions/checkout@v4
|
||||
if: ${{ env.GH_PAT == '' }}
|
||||
|
||||
- uses: astral-sh/setup-uv@v5
|
||||
- run: uv lock
|
||||
- uses: stefanzweifel/git-auto-commit-action@v5
|
||||
with:
|
||||
commit_message: Regenerate uv.lock
|
11
.github/workflows/django-issue-checker.yml
vendored
11
.github/workflows/django-issue-checker.yml
vendored
|
@ -17,14 +17,9 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.11"
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r requirements.txt
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- name: Create Django Major Issue
|
||||
run: python scripts/create_django_issue.py
|
||||
run: uv run --frozen scripts/create_django_issue.py
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
|
6
.github/workflows/issue-manager.yml
vendored
6
.github/workflows/issue-manager.yml
vendored
|
@ -23,7 +23,7 @@ jobs:
|
|||
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: tiangolo/issue-manager@0.4.1
|
||||
- uses: tiangolo/issue-manager@0.5.1
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
config: >
|
||||
|
@ -39,5 +39,9 @@ jobs:
|
|||
"waiting": {
|
||||
"delay": 864000,
|
||||
"message": "Automatically closing after waiting for additional info. To re-open, please provide the additional information requested."
|
||||
},
|
||||
"wontfix": {
|
||||
"delay": 864000,
|
||||
"message": "As discussed, we won't be implementing this. Automatically closing."
|
||||
}
|
||||
}
|
||||
|
|
4
.github/workflows/pre-commit-autoupdate.yml
vendored
4
.github/workflows/pre-commit-autoupdate.yml
vendored
|
@ -24,7 +24,7 @@ jobs:
|
|||
- uses: actions/checkout@v4
|
||||
- uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.11"
|
||||
python-version: "3.12"
|
||||
|
||||
- name: Install pre-commit
|
||||
run: pip install pre-commit
|
||||
|
@ -37,7 +37,7 @@ jobs:
|
|||
run: pre-commit autoupdate
|
||||
|
||||
- name: Create Pull Request
|
||||
uses: peter-evans/create-pull-request@v5
|
||||
uses: peter-evans/create-pull-request@v7
|
||||
with:
|
||||
token: ${{ secrets.GITHUB_TOKEN }}
|
||||
branch: update/pre-commit-autoupdate
|
||||
|
|
17
.github/workflows/update-changelog.yml
vendored
17
.github/workflows/update-changelog.yml
vendored
|
@ -8,27 +8,20 @@ on:
|
|||
workflow_dispatch:
|
||||
|
||||
jobs:
|
||||
release:
|
||||
update:
|
||||
# Disables this workflow from running in a repository that is not part of the indicated organization/user
|
||||
if: github.repository_owner == 'cookiecutter'
|
||||
|
||||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.11"
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r requirements.txt
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- name: Set git details
|
||||
run: |
|
||||
git config --global user.name "github-actions"
|
||||
git config --global user.email "action@github.com"
|
||||
- name: Update list
|
||||
run: python scripts/update_changelog.py
|
||||
- name: Update changelog
|
||||
run: uv run --frozen scripts/update_changelog.py
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
|
15
.github/workflows/update-contributors.yml
vendored
15
.github/workflows/update-contributors.yml
vendored
|
@ -18,22 +18,15 @@ jobs:
|
|||
runs-on: ubuntu-latest
|
||||
steps:
|
||||
- uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: "3.11"
|
||||
- name: Install dependencies
|
||||
run: |
|
||||
python -m pip install --upgrade pip
|
||||
pip install -r requirements.txt
|
||||
- name: Install uv
|
||||
uses: astral-sh/setup-uv@v5
|
||||
- name: Update list
|
||||
run: python scripts/update_contributors.py
|
||||
run: uv run --frozen scripts/update_contributors.py
|
||||
env:
|
||||
GITHUB_TOKEN: ${{ secrets.GITHUB_TOKEN }}
|
||||
|
||||
- name: Commit changes
|
||||
uses: stefanzweifel/git-auto-commit-action@v5.0.0
|
||||
uses: stefanzweifel/git-auto-commit-action@v5.0.1
|
||||
with:
|
||||
commit_message: Update Contributors
|
||||
file_pattern: CONTRIBUTORS.md .github/contributors.json
|
||||
|
|
|
@ -1,12 +1,13 @@
|
|||
exclude: "{{cookiecutter.project_slug}}|.github/contributors.json|CHANGELOG.md|CONTRIBUTORS.md"
|
||||
default_stages: [commit]
|
||||
default_stages: [pre-commit]
|
||||
minimum_pre_commit_version: "3.2.0"
|
||||
|
||||
default_language_version:
|
||||
python: python3.11
|
||||
python: python3.12
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.5.0
|
||||
rev: v5.0.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
|
@ -26,27 +27,32 @@ repos:
|
|||
args: ["--tab-width", "2"]
|
||||
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v3.15.0
|
||||
rev: v3.19.1
|
||||
hooks:
|
||||
- id: pyupgrade
|
||||
args: [--py311-plus]
|
||||
args: [--py312-plus]
|
||||
exclude: hooks/
|
||||
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 23.12.1
|
||||
rev: 25.1.0
|
||||
hooks:
|
||||
- id: black
|
||||
|
||||
- repo: https://github.com/PyCQA/isort
|
||||
rev: 5.13.2
|
||||
rev: 6.0.1
|
||||
hooks:
|
||||
- id: isort
|
||||
|
||||
- repo: https://github.com/PyCQA/flake8
|
||||
rev: 7.0.0
|
||||
rev: 7.2.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
|
||||
- repo: https://github.com/tox-dev/pyproject-fmt
|
||||
rev: "v2.5.1"
|
||||
hooks:
|
||||
- id: pyproject-fmt
|
||||
|
||||
ci:
|
||||
autoupdate_schedule: weekly
|
||||
skip: []
|
||||
|
|
|
@ -14,8 +14,6 @@ pin: True
|
|||
label_prs: update
|
||||
|
||||
requirements:
|
||||
- "requirements.txt"
|
||||
- "docs/requirements.txt"
|
||||
- "{{cookiecutter.project_slug}}/requirements/base.txt"
|
||||
- "{{cookiecutter.project_slug}}/requirements/local.txt"
|
||||
- "{{cookiecutter.project_slug}}/requirements/production.txt"
|
||||
|
|
|
@ -8,13 +8,14 @@ version: 2
|
|||
build:
|
||||
os: ubuntu-22.04
|
||||
tools:
|
||||
python: "3.11"
|
||||
python: "3.12"
|
||||
commands:
|
||||
- asdf plugin add uv
|
||||
- asdf install uv latest
|
||||
- asdf global uv latest
|
||||
- uv sync --only-group docs --frozen
|
||||
- uv run -m sphinx -T -b html -d docs/_build/doctrees -D language=en docs $READTHEDOCS_OUTPUT/html
|
||||
|
||||
# Build documentation in the docs/ directory with Sphinx
|
||||
sphinx:
|
||||
configuration: docs/conf.py
|
||||
|
||||
# Declare the Python requirements required to build your docs
|
||||
python:
|
||||
install:
|
||||
- requirements: docs/requirements.txt
|
||||
|
|
2603
CHANGELOG.md
2603
CHANGELOG.md
File diff suppressed because it is too large
Load Diff
|
@ -18,39 +18,26 @@ This last step is very important, don't start developing from master, it'll caus
|
|||
|
||||
## Testing
|
||||
|
||||
You'll need to run the tests using Python 3.11. We recommend using [tox](https://tox.readthedocs.io/en/latest/) to run the tests. It will automatically create a fresh virtual environment and install our test dependencies, such as [pytest-cookies](https://pypi.python.org/pypi/pytest-cookies/) and [flake8](https://pypi.python.org/pypi/flake8/).
|
||||
You'll need to run the tests using Python 3.12. We recommend using [tox](https://tox.readthedocs.io/en/latest/) to run the tests. It will automatically create a fresh virtual environment and install our test dependencies, such as [pytest-cookies](https://pypi.python.org/pypi/pytest-cookies/) and [flake8](https://pypi.python.org/pypi/flake8/).
|
||||
|
||||
We'll also run the tests on GitHub actions when you send your pull request, but it's a good idea to run them locally before you send it.
|
||||
|
||||
### Installation
|
||||
|
||||
First, make sure that your version of Python is 3.11:
|
||||
|
||||
```bash
|
||||
$ python --version
|
||||
Python 3.11.3
|
||||
```
|
||||
|
||||
Any version that starts with 3.11 will do. If you need to install it, you can get it from [python.org](https://www.python.org/downloads/).
|
||||
|
||||
Then install `tox`, if not already installed:
|
||||
|
||||
```bash
|
||||
$ python -m pip install tox
|
||||
```
|
||||
We use uv to manage our environment and manage our Python installation. You can install it following the instructions at https://docs.astral.sh/uv/getting-started/installation/
|
||||
|
||||
### Run the template's test suite
|
||||
|
||||
To run the tests of the template using the current Python version:
|
||||
|
||||
```bash
|
||||
$ tox -e py
|
||||
$ uv run tox run -e py
|
||||
```
|
||||
|
||||
This uses `pytest `under the hood, and you can pass options to it after a `--`. So to run a particular test:
|
||||
|
||||
```bash
|
||||
$ tox -e py -- -k test_default_configuration
|
||||
$ uv run tox run -e py -- -k test_default_configuration
|
||||
```
|
||||
|
||||
For further information, please consult the [pytest usage docs](https://pytest.org/en/latest/how-to/usage.html#specifying-which-tests-to-run).
|
||||
|
@ -66,13 +53,13 @@ $ source venv/bin/activate
|
|||
|
||||
These tests are slower and can be run with or without Docker:
|
||||
|
||||
- Without Docker: `scripts/test_bare.sh` (for bare metal)
|
||||
- With Docker: `scripts/test_docker.sh`
|
||||
- Without Docker: `tests/test_bare.sh` (for bare metal)
|
||||
- With Docker: `tests/test_docker.sh`
|
||||
|
||||
All arguments to these scripts will be passed to the `cookiecutter` CLI, letting you set options, for example:
|
||||
|
||||
```bash
|
||||
$ scripts/test_bare.sh use_celery=y
|
||||
$ tests/test_bare.sh use_celery=y
|
||||
```
|
||||
|
||||
## Submitting a pull request
|
||||
|
|
301
CONTRIBUTORS.md
301
CONTRIBUTORS.md
|
@ -201,6 +201,20 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Aidos Kanapyanov</td>
|
||||
<td>
|
||||
<a href="https://github.com/aidoskanapyanov">aidoskanapyanov</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Alan Cyment</td>
|
||||
<td>
|
||||
<a href="https://github.com/acyment">acyment</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Alberto Sanchez</td>
|
||||
<td>
|
||||
|
@ -208,6 +222,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Alex Kanavos</td>
|
||||
<td>
|
||||
<a href="https://github.com/alexkanavos">alexkanavos</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Alex Tsai</td>
|
||||
<td>
|
||||
|
@ -215,6 +236,20 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Alexandr Artemyev</td>
|
||||
<td>
|
||||
<a href="https://github.com/Mogost">Mogost</a>
|
||||
</td>
|
||||
<td>MOGOST</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Ali Shamakhi</td>
|
||||
<td>
|
||||
<a href="https://github.com/ali-shamakhi">ali-shamakhi</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Alvaro [Andor]</td>
|
||||
<td>
|
||||
|
@ -523,6 +558,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Christian Jensen</td>
|
||||
<td>
|
||||
<a href="https://github.com/jensenbox">jensenbox</a>
|
||||
</td>
|
||||
<td>cjensen</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Christopher Clarke</td>
|
||||
<td>
|
||||
|
@ -656,6 +698,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td>DavidDiazPinto</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>David Păcioianu</td>
|
||||
<td>
|
||||
<a href="https://github.com/DavidPacioianu">DavidPacioianu</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Davit Tovmasyan</td>
|
||||
<td>
|
||||
|
@ -698,6 +747,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Denis Darii</td>
|
||||
<td>
|
||||
<a href="https://github.com/DNX">DNX</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Denis Orehovsky</td>
|
||||
<td>
|
||||
|
@ -761,6 +817,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Ed Morley</td>
|
||||
<td>
|
||||
<a href="https://github.com/edmorley">edmorley</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Emanuel Calso</td>
|
||||
<td>
|
||||
|
@ -810,6 +873,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td>fabaff</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>farwill</td>
|
||||
<td>
|
||||
<a href="https://github.com/farwill">farwill</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Fateme Fouladkar</td>
|
||||
<td>
|
||||
|
@ -824,6 +894,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Filipe Nascimento</td>
|
||||
<td>
|
||||
<a href="https://github.com/FilipeNas">FilipeNas</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Florian Idelberger</td>
|
||||
<td>
|
||||
|
@ -838,6 +915,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Francisco Navarro Morales </td>
|
||||
<td>
|
||||
<a href="https://github.com/spothound">spothound</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Freddy</td>
|
||||
<td>
|
||||
|
@ -873,6 +957,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Geo Maciolek</td>
|
||||
<td>
|
||||
<a href="https://github.com/GeoMaciolek">GeoMaciolek</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>ghazi-git</td>
|
||||
<td>
|
||||
|
@ -915,6 +1006,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>GvS</td>
|
||||
<td>
|
||||
<a href="https://github.com/GvS666">GvS666</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Hamish Durkin</td>
|
||||
<td>
|
||||
|
@ -922,6 +1020,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Hana Belay</td>
|
||||
<td>
|
||||
<a href="https://github.com/earthcomfy">earthcomfy</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Hana Quadara</td>
|
||||
<td>
|
||||
|
@ -964,6 +1069,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>henningbra</td>
|
||||
<td>
|
||||
<a href="https://github.com/henningbra">henningbra</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Henrique G. G. Pereira</td>
|
||||
<td>
|
||||
|
@ -999,6 +1111,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Igor Jerosimić</td>
|
||||
<td>
|
||||
<a href="https://github.com/igor-wl">igor-wl</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Imran Rahman</td>
|
||||
<td>
|
||||
|
@ -1041,6 +1160,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>JAEGYUN JUNG</td>
|
||||
<td>
|
||||
<a href="https://github.com/TGoddessana">TGoddessana</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Jakub Boukal</td>
|
||||
<td>
|
||||
|
@ -1076,6 +1202,20 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Jason Mok</td>
|
||||
<td>
|
||||
<a href="https://github.com/jasonmokk">jasonmokk</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Jens Kaeske</td>
|
||||
<td>
|
||||
<a href="https://github.com/jkaeske">jkaeske</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Jens Nilsson</td>
|
||||
<td>
|
||||
|
@ -1118,6 +1258,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td>cass_john</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Johnny Metz</td>
|
||||
<td>
|
||||
<a href="https://github.com/johnnymetz">johnnymetz</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Jonathan Thompson</td>
|
||||
<td>
|
||||
|
@ -1188,6 +1335,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Kawsar Alam Foysal</td>
|
||||
<td>
|
||||
<a href="https://github.com/iamfoysal">iamfoysal</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Keith Bailey</td>
|
||||
<td>
|
||||
|
@ -1216,6 +1370,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Kevin Mills</td>
|
||||
<td>
|
||||
<a href="https://github.com/millsks">millsks</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Kevin Ndung'u</td>
|
||||
<td>
|
||||
|
@ -1321,6 +1482,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>LJFP</td>
|
||||
<td>
|
||||
<a href="https://github.com/ljfp">ljfp</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Luis Nell</td>
|
||||
<td>
|
||||
|
@ -1349,6 +1517,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td>flyudvik</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Manas Mallick</td>
|
||||
<td>
|
||||
<a href="https://github.com/ManDun">ManDun</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Manjit Pardeshi</td>
|
||||
<td>
|
||||
|
@ -1363,6 +1538,27 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td>marciomazza</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Marios Frixou</td>
|
||||
<td>
|
||||
<a href="https://github.com/frixou89">frixou89</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Mariot Tsitoara</td>
|
||||
<td>
|
||||
<a href="https://github.com/mariot">mariot</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Marlon Castillo</td>
|
||||
<td>
|
||||
<a href="https://github.com/mcastle">mcastle</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Martin Blech</td>
|
||||
<td>
|
||||
|
@ -1440,6 +1636,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Matthew Foster Walsh</td>
|
||||
<td>
|
||||
<a href="https://github.com/mfosterw">mfosterw</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Matthew Sisley</td>
|
||||
<td>
|
||||
|
@ -1475,13 +1678,6 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>mfosterw</td>
|
||||
<td>
|
||||
<a href="https://github.com/mfosterw">mfosterw</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Michael Gecht</td>
|
||||
<td>
|
||||
|
@ -1510,6 +1706,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>milvagox</td>
|
||||
<td>
|
||||
<a href="https://github.com/milvagox">milvagox</a>
|
||||
</td>
|
||||
<td>milvagox</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Min ho Kim</td>
|
||||
<td>
|
||||
|
@ -1538,6 +1741,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Mounir</td>
|
||||
<td>
|
||||
<a href="https://github.com/mounirmesselmeni">mounirmesselmeni</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>mozillazg</td>
|
||||
<td>
|
||||
|
@ -1559,6 +1769,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Nadav Peretz</td>
|
||||
<td>
|
||||
<a href="https://github.com/nadavperetz">nadavperetz</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Naveen</td>
|
||||
<td>
|
||||
|
@ -1622,6 +1839,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td>pamelafox</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Param Kapur</td>
|
||||
<td>
|
||||
<a href="https://github.com/paramkpr">paramkpr</a>
|
||||
</td>
|
||||
<td>ParamKapur</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Parbhat Puri</td>
|
||||
<td>
|
||||
|
@ -1636,6 +1860,20 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Patrick Zhang</td>
|
||||
<td>
|
||||
<a href="https://github.com/PatDuJour">PatDuJour</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Paul Wulff</td>
|
||||
<td>
|
||||
<a href="https://github.com/mtmpaulwulff">mtmpaulwulff</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Pawan Chaurasia</td>
|
||||
<td>
|
||||
|
@ -1650,6 +1888,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Pepa</td>
|
||||
<td>
|
||||
<a href="https://github.com/07pepa">07pepa</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Peter Bittner</td>
|
||||
<td>
|
||||
|
@ -1692,6 +1937,27 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Pulse-Mind</td>
|
||||
<td>
|
||||
<a href="https://github.com/pulse-mind">pulse-mind</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>quroom</td>
|
||||
<td>
|
||||
<a href="https://github.com/quroom">quroom</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>qwerrrqw</td>
|
||||
<td>
|
||||
<a href="https://github.com/qwerrrqw">qwerrrqw</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Raony Guimarães Corrêa</td>
|
||||
<td>
|
||||
|
@ -1769,6 +2035,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>rxm7706</td>
|
||||
<td>
|
||||
<a href="https://github.com/rxm7706">rxm7706</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Ryan Fitch</td>
|
||||
<td>
|
||||
|
@ -1811,6 +2084,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td>shywn_mrk</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Simeon Emanuilov</td>
|
||||
<td>
|
||||
<a href="https://github.com/s-emanuilov">s-emanuilov</a>
|
||||
</td>
|
||||
<td>s_emanuilov</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Simon Rey</td>
|
||||
<td>
|
||||
|
@ -2063,6 +2343,13 @@ Listed in alphabetical order.
|
|||
</td>
|
||||
<td>westurner</td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Will</td>
|
||||
<td>
|
||||
<a href="https://github.com/novucs">novucs</a>
|
||||
</td>
|
||||
<td></td>
|
||||
</tr>
|
||||
<tr>
|
||||
<td>Will Farley</td>
|
||||
<td>
|
||||
|
|
41
README.md
41
README.md
|
@ -6,21 +6,21 @@
|
|||
[](https://github.com/ambv/black)
|
||||
|
||||
[](https://pyup.io/repos/github/cookiecutter/cookiecutter-django/)
|
||||
[](https://discord.gg/uFXweDQc5a)
|
||||
[](https://discord.gg/rAWFUP47d2)
|
||||
[](https://www.codetriage.com/cookiecutter/cookiecutter-django)
|
||||
|
||||
Powered by [Cookiecutter](https://github.com/cookiecutter/cookiecutter), Cookiecutter Django is a framework for jumpstarting
|
||||
production-ready Django projects quickly.
|
||||
|
||||
- Documentation: <https://cookiecutter-django.readthedocs.io/en/latest/>
|
||||
- See [Troubleshooting](https://cookiecutter-django.readthedocs.io/en/latest/troubleshooting.html) for common errors and obstacles
|
||||
- See [Troubleshooting](https://cookiecutter-django.readthedocs.io/en/latest/5-help/troubleshooting.html) for common errors and obstacles
|
||||
- If you have problems with Cookiecutter Django, please open [issues](https://github.com/cookiecutter/cookiecutter-django/issues/new) don't send
|
||||
emails to the maintainers.
|
||||
|
||||
## Features
|
||||
|
||||
- For Django 4.2
|
||||
- Works with Python 3.11
|
||||
- For Django 5.1
|
||||
- Works with Python 3.12
|
||||
- Renders Django projects with 100% starting test coverage
|
||||
- Twitter [Bootstrap](https://github.com/twbs/bootstrap) v5
|
||||
- [12-Factor](https://12factor.net) based settings via [django-environ](https://github.com/joke2k/django-environ)
|
||||
|
@ -51,7 +51,7 @@ _These features can be enabled during initial project setup._
|
|||
## Constraints
|
||||
|
||||
- Only maintained 3rd party libraries are used.
|
||||
- Uses PostgreSQL everywhere: 10 - 15 ([MySQL fork](https://github.com/mabdullahadeel/cookiecutter-django-mysql) also available).
|
||||
- Uses PostgreSQL everywhere: 13 - 16 ([MySQL fork](https://github.com/mabdullahadeel/cookiecutter-django-mysql) also available).
|
||||
- Environment variables for configuration (This won't work with Apache/mod_wsgi).
|
||||
|
||||
## Support this Project!
|
||||
|
@ -65,19 +65,15 @@ This project is an open source project run by volunteers. You can sponsor us via
|
|||
|
||||
Projects that provide financial support to the maintainers:
|
||||
|
||||
---
|
||||
### Two Scoops of Django
|
||||
|
||||
<p align="center">
|
||||
<a href="https://www.feldroy.com/products//two-scoops-of-django-3-x"><img src="https://cdn.shopify.com/s/files/1/0304/6901/products/Two-Scoops-of-Django-3-Alpha-Cover_540x_26507b15-e489-470b-8a97-02773dd498d1_1080x.jpg"></a>
|
||||
</p>
|
||||
[](https://www.feldroy.com/two-scoops-press#two-scoops-of-django)
|
||||
|
||||
Two Scoops of Django 3.x is the best ice cream-themed Django reference in the universe!
|
||||
|
||||
### PyUp
|
||||
|
||||
<p align="center">
|
||||
<a href="https://pyup.io/"><img src="https://pyup.io/static/images/logo.png"></a>
|
||||
</p>
|
||||
[](https://pyup.io)
|
||||
|
||||
PyUp brings you automated security and dependency updates used by Google and other organizations. Free for open source projects!
|
||||
|
||||
|
@ -98,7 +94,7 @@ You'll be prompted for some values. Provide them, then a Django project will be
|
|||
|
||||
**Warning**: After this point, change 'Daniel Greenfeld', 'pydanny', etc to your own information.
|
||||
|
||||
Answer the prompts with your own desired [options](http://cookiecutter-django.readthedocs.io/en/latest/project-generation-options.html). For example:
|
||||
Answer the prompts with your own desired [options](http://cookiecutter-django.readthedocs.io/en/latest/1-getting-started/project-generation-options.html). For example:
|
||||
|
||||
Cloning into 'cookiecutter-django'...
|
||||
remote: Counting objects: 550, done.
|
||||
|
@ -133,13 +129,11 @@ Answer the prompts with your own desired [options](http://cookiecutter-django.re
|
|||
Choose from 1, 2, 3 [1]: 1
|
||||
use_docker [n]: n
|
||||
Select postgresql_version:
|
||||
1 - 15
|
||||
2 - 14
|
||||
3 - 13
|
||||
4 - 12
|
||||
5 - 11
|
||||
6 - 10
|
||||
Choose from 1, 2, 3, 4, 5 [1]: 1
|
||||
1 - 16
|
||||
2 - 15
|
||||
3 - 14
|
||||
4 - 13
|
||||
Choose from 1, 2, 3, 4 [1]: 1
|
||||
Select cloud_provider:
|
||||
1 - AWS
|
||||
2 - GCP
|
||||
|
@ -152,7 +146,7 @@ Answer the prompts with your own desired [options](http://cookiecutter-django.re
|
|||
4 - Mandrill
|
||||
5 - Postmark
|
||||
6 - Sendgrid
|
||||
7 - SendinBlue
|
||||
7 - Brevo (formerly SendinBlue)
|
||||
8 - SparkPost
|
||||
9 - Other SMTP
|
||||
Choose from 1, 2, 3, 4, 5, 6, 7, 8, 9 [1]: 1
|
||||
|
@ -195,8 +189,8 @@ Now take a look at your repo. Don't forget to carefully look at the generated RE
|
|||
|
||||
For local development, see the following:
|
||||
|
||||
- [Developing locally](http://cookiecutter-django.readthedocs.io/en/latest/developing-locally.html)
|
||||
- [Developing locally using docker](http://cookiecutter-django.readthedocs.io/en/latest/developing-locally-docker.html)
|
||||
- [Developing locally](https://cookiecutter-django.readthedocs.io/en/latest/2-local-development/developing-locally.html)
|
||||
- [Developing locally using docker](https://cookiecutter-django.readthedocs.io/en/latest/2-local-development/developing-locally-docker.html)
|
||||
|
||||
## Community
|
||||
|
||||
|
@ -249,6 +243,7 @@ experience better.
|
|||
|
||||
## Articles
|
||||
|
||||
- [Why cookiecutter-django is Essential for Your Next Django Project](https://medium.com/@millsks/why-cookiecutter-django-is-essential-for-your-next-django-project-7d3c00cdce51) - Aug. 4, 2024
|
||||
- [How to Make Your Own Django Cookiecutter Template!](https://medium.com/@FatemeFouladkar/how-to-make-your-own-django-cookiecutter-template-a753d4cbb8c2) - Aug. 10, 2023
|
||||
- [Cookiecutter Django With Amazon RDS](https://haseeburrehman.com/posts/cookiecutter-django-with-amazon-rds/) - Apr, 2, 2021
|
||||
- [Complete Walkthrough: Blue/Green Deployment to AWS ECS using GitHub actions](https://github.com/Andrew-Chen-Wang/cookiecutter-django-ecs-github) - June 10, 2020
|
||||
|
|
|
@ -18,7 +18,7 @@
|
|||
"windows": "n",
|
||||
"editor": ["None", "PyCharm", "VS Code"],
|
||||
"use_docker": "n",
|
||||
"postgresql_version": ["15", "14", "13", "12", "11", "10"],
|
||||
"postgresql_version": ["16", "15", "14", "13"],
|
||||
"cloud_provider": ["AWS", "GCP", "Azure", "None"],
|
||||
"mail_service": [
|
||||
"Mailgun",
|
||||
|
@ -27,7 +27,7 @@
|
|||
"Mandrill",
|
||||
"Postmark",
|
||||
"Sendgrid",
|
||||
"SendinBlue",
|
||||
"Brevo",
|
||||
"SparkPost",
|
||||
"Other SMTP"
|
||||
],
|
||||
|
|
|
@ -66,12 +66,10 @@ use_docker:
|
|||
postgresql_version:
|
||||
Select a PostgreSQL_ version to use. The choices are:
|
||||
|
||||
1. 15
|
||||
2. 14
|
||||
3. 13
|
||||
4. 12
|
||||
5. 11
|
||||
6. 10
|
||||
1. 16
|
||||
2. 15
|
||||
3. 14
|
||||
4. 13
|
||||
|
||||
cloud_provider:
|
||||
Select a cloud provider for static & media files. The choices are:
|
||||
|
@ -92,7 +90,7 @@ mail_service:
|
|||
4. Mandrill_
|
||||
5. Postmark_
|
||||
6. SendGrid_
|
||||
7. SendinBlue_
|
||||
7. `Brevo (formerly SendinBlue)`_
|
||||
8. SparkPost_
|
||||
9. `Other SMTP`_
|
||||
|
||||
|
@ -175,7 +173,7 @@ debug:
|
|||
.. _Mandrill: http://mandrill.com
|
||||
.. _Postmark: https://postmarkapp.com
|
||||
.. _SendGrid: https://sendgrid.com
|
||||
.. _SendinBlue: https://www.sendinblue.com
|
||||
.. _Brevo (formerly SendinBlue): https://www.brevo.com
|
||||
.. _SparkPost: https://www.sparkpost.com
|
||||
.. _Other SMTP: https://anymail.readthedocs.io/en/stable/
|
||||
|
|
@ -39,7 +39,6 @@ The following table lists settings and their defaults for third-party applicatio
|
|||
======================================= =========================== ============================================== ======================================================================
|
||||
Environment Variable Django Setting Development Default Production Default
|
||||
======================================= =========================== ============================================== ======================================================================
|
||||
CELERY_BROKER_URL CELERY_BROKER_URL auto w/ Docker; raises error w/o raises error
|
||||
DJANGO_AWS_ACCESS_KEY_ID AWS_ACCESS_KEY_ID n/a raises error
|
||||
DJANGO_AWS_SECRET_ACCESS_KEY AWS_SECRET_ACCESS_KEY n/a raises error
|
||||
DJANGO_AWS_STORAGE_BUCKET_NAME AWS_STORAGE_BUCKET_NAME n/a raises error
|
||||
|
@ -69,8 +68,8 @@ SENDGRID_API_KEY SENDGRID_API_KEY n/a
|
|||
SENDGRID_GENERATE_MESSAGE_ID True n/a raises error
|
||||
SENDGRID_MERGE_FIELD_FORMAT None n/a raises error
|
||||
SENDGRID_API_URL n/a n/a "https://api.sendgrid.com/v3/"
|
||||
SENDINBLUE_API_KEY SENDINBLUE_API_KEY n/a raises error
|
||||
SENDINBLUE_API_URL n/a n/a "https://api.sendinblue.com/v3/"
|
||||
BREVO_API_KEY BREVO_API_KEY n/a raises error
|
||||
BREVO_API_URL n/a n/a "https://api.brevo.com/v3/"
|
||||
SPARKPOST_API_KEY SPARKPOST_API_KEY n/a raises error
|
||||
SPARKPOST_API_URL n/a n/a "https://api.sparkpost.com/api/v1"
|
||||
======================================= =========================== ============================================== ======================================================================
|
|
@ -32,9 +32,9 @@ Build the Stack
|
|||
|
||||
This can take a while, especially the first time you run this particular command on your development system::
|
||||
|
||||
$ docker compose -f local.yml build
|
||||
$ docker compose -f docker-compose.local.yml build
|
||||
|
||||
Generally, if you want to emulate production environment use ``production.yml`` instead. And this is true for any other actions you might need to perform: whenever a switch is required, just do it!
|
||||
Generally, if you want to emulate production environment use ``docker-compose.production.yml`` instead. And this is true for any other actions you might need to perform: whenever a switch is required, just do it!
|
||||
|
||||
Before doing any git commit, `pre-commit`_ should be installed globally on your local machine, and then::
|
||||
|
||||
|
@ -43,7 +43,6 @@ Before doing any git commit, `pre-commit`_ should be installed globally on your
|
|||
|
||||
Failing to do so will result with a bunch of CI and Linter errors that can be avoided with pre-commit.
|
||||
|
||||
|
||||
Run the Stack
|
||||
-------------
|
||||
|
||||
|
@ -51,11 +50,11 @@ This brings up both Django and PostgreSQL. The first time it is run it might tak
|
|||
|
||||
Open a terminal at the project root and run the following for local development::
|
||||
|
||||
$ docker compose -f local.yml up
|
||||
$ docker compose -f docker-compose.local.yml up
|
||||
|
||||
You can also set the environment variable ``COMPOSE_FILE`` pointing to ``local.yml`` like this::
|
||||
You can also set the environment variable ``COMPOSE_FILE`` pointing to ``docker-compose.local.yml`` like this::
|
||||
|
||||
$ export COMPOSE_FILE=local.yml
|
||||
$ export COMPOSE_FILE=docker-compose.local.yml
|
||||
|
||||
And then run::
|
||||
|
||||
|
@ -65,16 +64,23 @@ To run in a detached (background) mode, just::
|
|||
|
||||
$ docker compose up -d
|
||||
|
||||
These commands don't run the docs service. In order to run docs service you can run::
|
||||
|
||||
$ docker compose -f docker-compose.docs.yml up
|
||||
|
||||
To run the docs with local services just use::
|
||||
|
||||
$ docker compose -f docker-compose.local.yml -f docker-compose.docs.yml up
|
||||
|
||||
The site should start and be accessible at http://localhost:3000 if you selected Webpack or Gulp as frontend pipeline and http://localhost:8000 otherwise.
|
||||
|
||||
Execute Management Commands
|
||||
---------------------------
|
||||
|
||||
As with any shell command that we wish to run in our container, this is done using the ``docker compose -f local.yml run --rm`` command: ::
|
||||
As with any shell command that we wish to run in our container, this is done using the ``docker compose -f docker-compose.local.yml run --rm`` command: ::
|
||||
|
||||
$ docker compose -f local.yml run --rm django python manage.py migrate
|
||||
$ docker compose -f local.yml run --rm django python manage.py createsuperuser
|
||||
$ docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
|
||||
$ docker compose -f docker-compose.local.yml run --rm django python manage.py createsuperuser
|
||||
|
||||
Here, ``django`` is the target service we are executing the commands against.
|
||||
Also, please note that the ``docker exec`` does not work for running management commands.
|
||||
|
@ -84,13 +90,12 @@ Also, please note that the ``docker exec`` does not work for running management
|
|||
|
||||
When ``DEBUG`` is set to ``True``, the host is validated against ``['localhost', '127.0.0.1', '[::1]']``. This is adequate when running a ``virtualenv``. For Docker, in the ``config.settings.local``, add your host development server IP to ``INTERNAL_IPS`` or ``ALLOWED_HOSTS`` if the variable exists.
|
||||
|
||||
|
||||
.. _envs:
|
||||
|
||||
Configuring the Environment
|
||||
---------------------------
|
||||
|
||||
This is the excerpt from your project's ``local.yml``: ::
|
||||
This is the excerpt from your project's ``docker-compose.local.yml``: ::
|
||||
|
||||
# ...
|
||||
|
||||
|
@ -110,8 +115,8 @@ The most important thing for us here now is ``env_file`` section enlisting ``./.
|
|||
|
||||
.envs
|
||||
├── .local
|
||||
│ ├── .django
|
||||
│ └── .postgres
|
||||
│ ├── .django
|
||||
│ └── .postgres
|
||||
└── .production
|
||||
├── .django
|
||||
└── .postgres
|
||||
|
@ -156,8 +161,8 @@ You have to modify the relevant requirement file: base, local or production by a
|
|||
|
||||
To get this change picked up, you'll need to rebuild the image(s) and restart the running container: ::
|
||||
|
||||
docker compose -f local.yml build
|
||||
docker compose -f local.yml up
|
||||
docker compose -f docker-compose.local.yml build
|
||||
docker compose -f docker-compose.local.yml up
|
||||
|
||||
Debugging
|
||||
~~~~~~~~~
|
||||
|
@ -171,7 +176,7 @@ If you are using the following within your code to debug: ::
|
|||
|
||||
Then you may need to run the following for it to work as desired: ::
|
||||
|
||||
$ docker compose -f local.yml run --rm --service-ports django
|
||||
$ docker compose -f docker-compose.local.yml run --rm --service-ports django
|
||||
|
||||
|
||||
django-debug-toolbar
|
||||
|
@ -188,7 +193,6 @@ The ``container_name`` from the yml file can be used to check on containers with
|
|||
$ docker logs <project_slug>_local_celeryworker
|
||||
$ docker top <project_slug>_local_celeryworker
|
||||
|
||||
|
||||
Notice that the ``container_name`` is generated dynamically using your project slug as a prefix
|
||||
|
||||
Mailpit
|
||||
|
@ -224,7 +228,7 @@ Prerequisites:
|
|||
* ``use_docker`` was set to ``y`` on project initialization;
|
||||
* ``use_celery`` was set to ``y`` on project initialization.
|
||||
|
||||
By default, it's enabled both in local and production environments (``local.yml`` and ``production.yml`` Docker Compose configs, respectively) through a ``flower`` service. For added security, ``flower`` requires its clients to provide authentication credentials specified as the corresponding environments' ``.envs/.local/.django`` and ``.envs/.production/.django`` ``CELERY_FLOWER_USER`` and ``CELERY_FLOWER_PASSWORD`` environment variables. Check out ``localhost:5555`` and see for yourself.
|
||||
By default, it's enabled both in local and production environments (``docker-compose.local.yml`` and ``docker-compose.production.yml`` Docker Compose configs, respectively) through a ``flower`` service. For added security, ``flower`` requires its clients to provide authentication credentials specified as the corresponding environments' ``.envs/.local/.django`` and ``.envs/.production/.django`` ``CELERY_FLOWER_USER`` and ``CELERY_FLOWER_PASSWORD`` environment variables. Check out ``localhost:5555`` and see for yourself.
|
||||
|
||||
.. _`Flower`: https://github.com/mher/flower
|
||||
|
||||
|
@ -238,46 +242,65 @@ The stack comes with a dedicated node service to build the static assets, watch
|
|||
.. _Sass: https://sass-lang.com/
|
||||
.. _live reloading: https://browsersync.io
|
||||
|
||||
Developing locally with HTTPS
|
||||
-----------------------------
|
||||
|
||||
Increasingly it is becoming necessary to develop software in a secure environment in order that there are very few changes when deploying to production. Recently Facebook changed their policies for apps/sites that use Facebook login which requires the use of an HTTPS URL for the OAuth redirect URL. So if you want to use the ``users`` application with an OAuth provider such as Facebook, securing your communication to the local development environment will be necessary.
|
||||
Using Just for Docker Commands
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
In order to create a secure environment, we need to have a trusted SSL certificate installed in our Docker application.
|
||||
We have included a ``justfile`` to simplify the use of frequent Docker commands for local development.
|
||||
|
||||
#. **Let's Encrypt**
|
||||
.. warning::
|
||||
Currently, "Just" does not reliably handle signals or forward them to its subprocesses. As a result,
|
||||
pressing CTRL+C (or sending other signals like SIGTERM, SIGINT, or SIGHUP) may only interrupt
|
||||
"Just" itself rather than its subprocesses.
|
||||
For more information, see `this GitHub issue <https://github.com/casey/just/issues/2473>`_.
|
||||
|
||||
The official line from Let’s Encrypt is:
|
||||
First, install Just using one of the methods described in the `official documentation <https://just.systems/man/en/packages.html>`_.
|
||||
|
||||
[For local development section] ... The best option: Generate your own certificate, either self-signed or signed by a local root, and trust it in your operating system’s trust store. Then use that certificate in your local web server. See below for details.
|
||||
Here are the available commands:
|
||||
|
||||
See `letsencrypt.org - certificates-for-localhost`_
|
||||
- ``just build``
|
||||
Builds the Python image using the local Docker Compose file.
|
||||
|
||||
.. _`letsencrypt.org - certificates-for-localhost`: https://letsencrypt.org/docs/certificates-for-localhost/
|
||||
- ``just up``
|
||||
Starts the containers in detached mode and removes orphaned containers.
|
||||
|
||||
#. **mkcert: Valid Https Certificates For Localhost**
|
||||
- ``just down``
|
||||
Stops the running containers.
|
||||
|
||||
`mkcert`_ is a simple by design tool that hides all the arcane knowledge required to generate valid TLS certificates. It works for any hostname or IP, including localhost. It supports macOS, Linux, and Windows, and Firefox, Chrome and Java. It even works on mobile devices with a couple manual steps.
|
||||
- ``just prune``
|
||||
Stops and removes containers along with their volumes. You can optionally pass an argument with the service name to prune a single container.
|
||||
|
||||
See https://blog.filippo.io/mkcert-valid-https-certificates-for-localhost/
|
||||
- ``just logs``
|
||||
Shows container logs. You can optionally pass an argument with the service name to view logs for a specific service.
|
||||
|
||||
.. _`mkcert`: https://github.com/FiloSottile/mkcert/blob/master/README.md#supported-root-stores
|
||||
- ``just manage <command>``
|
||||
Runs Django management commands within the container. Replace ``<command>`` with any valid Django management command, such as ``migrate``, ``createsuperuser``, or ``shell``.
|
||||
|
||||
After installing a trusted TLS certificate, configure your docker installation. We are going to configure an ``nginx`` reverse-proxy server. This makes sure that it does not interfere with our ``traefik`` configuration that is reserved for production environments.
|
||||
|
||||
These are the places that you should configure to secure your local environment.
|
||||
(Optionally) Developing locally with HTTPS
|
||||
------------------------------------------
|
||||
|
||||
certs
|
||||
Nginx
|
||||
~~~~~
|
||||
|
||||
Take the certificates that you generated and place them in a folder called ``certs`` in the project's root folder. Assuming that you registered your local hostname as ``my-dev-env.local``, the certificates you will put in the folder should have the names ``my-dev-env.local.crt`` and ``my-dev-env.local.key``.
|
||||
If you want to add some sort of social authentication with an OAuth provider such as Facebook, securing your communication to the local development environment will be necessary. These providers usually require that you use an HTTPS URL for the OAuth redirect URL for the Facebook login to work appropriately.
|
||||
|
||||
local.yml
|
||||
~~~~~~~~~
|
||||
Here is a link to an article on `how to add HTTPS using Nginx`_ to your local docker installation. This also includes how to serve files from the ``media`` location, in the event that you want to serve user-uploaded content.
|
||||
|
||||
#. Add the ``nginx-proxy`` service. ::
|
||||
.. _`how to add HTTPS using Nginx`: https://afroshok.com/cookiecutter-https
|
||||
|
||||
...
|
||||
Webpack
|
||||
~~~~~~~
|
||||
|
||||
If you are using Webpack, first install `mkcert`_. It is a simple by design tool that hides all the arcane knowledge required to generate valid TLS certificates. It works for any hostname or IP, including localhost. It supports macOS, Linux, and Windows, and Firefox, Chrome and Java. It even works on mobile devices with a couple manual steps. See https://blog.filippo.io/mkcert-valid-https-certificates-for-localhost/
|
||||
|
||||
.. _`mkcert`: https://github.com/FiloSottile/mkcert/blob/master/README.md#supported-root-stores
|
||||
|
||||
These are the places that you should configure to secure your local environment. Take the certificates that you generated and place them in a folder called ``certs`` in the project's root folder. Configure an ``nginx`` reverse-proxy server as a ``service`` in the ``docker-compose.local.yml``. This makes sure that it does not interfere with our ``traefik`` configuration that is reserved for production environments.
|
||||
|
||||
Assuming that you registered your local hostname as ``my-dev-env.local``, the certificates you will put in the folder should have the names ``my-dev-env.local.crt`` and ``my-dev-env.local.key``.
|
||||
|
||||
1. Add the ``nginx-proxy`` service to the ``docker-compose.local.yml``. ::
|
||||
|
||||
nginx-proxy:
|
||||
image: jwilder/nginx-proxy:alpine
|
||||
|
@ -290,66 +313,28 @@ local.yml
|
|||
- ./certs:/etc/nginx/certs
|
||||
restart: always
|
||||
depends_on:
|
||||
- django
|
||||
- node
|
||||
environment:
|
||||
- VIRTUAL_HOST=my-dev-env.local
|
||||
- VIRTUAL_PORT=3000
|
||||
|
||||
...
|
||||
2. Add the local secure domain to the ``config/settings/local.py``. You should allow the new hostname ::
|
||||
|
||||
#. Link the ``nginx-proxy`` to ``django`` through environment variables.
|
||||
ALLOWED_HOSTS = ["localhost", "0.0.0.0", "127.0.0.1", "my-dev-env.local"]
|
||||
|
||||
``django`` already has an ``.env`` file connected to it. Add the following variables. You should do this especially if you are working with a team and you want to keep your local environment details to yourself.
|
||||
3. Add the following configuration to the ``devServer`` section of ``webpack/dev.config.js`` ::
|
||||
|
||||
::
|
||||
client: {
|
||||
webSocketURL: 'auto://0.0.0.0:0/ws', // note the `:0` after `0.0.0.0`
|
||||
},
|
||||
|
||||
# HTTPS
|
||||
# ------------------------------------------------------------------------------
|
||||
VIRTUAL_HOST=my-dev-env.local
|
||||
VIRTUAL_PORT=8000
|
||||
|
||||
The services run behind the reverse proxy.
|
||||
|
||||
config/settings/local.py
|
||||
~~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
||||
You should allow the new hostname. ::
|
||||
|
||||
ALLOWED_HOSTS = ["localhost", "0.0.0.0", "127.0.0.1", "my-dev-env.local"]
|
||||
|
||||
Rebuild your ``docker`` application. ::
|
||||
|
||||
$ docker compose -f local.yml up -d --build
|
||||
$ docker compose -f docker-compose.local.yml up -d --build
|
||||
|
||||
Go to your browser and type in your URL bar ``https://my-dev-env.local``
|
||||
Go to your browser and type in your URL bar ``https://my-dev-env.local``.
|
||||
|
||||
See `https with nginx`_ for more information on this configuration.
|
||||
For more on this configuration, see `https with nginx`_.
|
||||
|
||||
.. _`https with nginx`: https://codewithhugo.com/docker-compose-local-https/
|
||||
|
||||
.gitignore
|
||||
~~~~~~~~~~
|
||||
|
||||
Add ``certs/*`` to the ``.gitignore`` file. This allows the folder to be included in the repo but its contents to be ignored.
|
||||
|
||||
*This configuration is for local development environments only. Do not use this for production since you might expose your local* ``rootCA-key.pem``.
|
||||
|
||||
Webpack
|
||||
~~~~~~~
|
||||
|
||||
If you are using Webpack:
|
||||
|
||||
1. On the ``nginx-proxy`` service in ``local.yml``, change ``depends_on`` to ``node`` instead of ``django``.
|
||||
|
||||
2. On the ``node`` service in ``local.yml``, add the following environment configuration:
|
||||
|
||||
::
|
||||
|
||||
environment:
|
||||
- VIRTUAL_HOST=my-dev-env.local
|
||||
- VIRTUAL_PORT=3000
|
||||
|
||||
3. Add the following configuration to the ``devServer`` section of ``webpack/dev.config.js``:
|
||||
|
||||
::
|
||||
|
||||
client: {
|
||||
webSocketURL: 'auto://0.0.0.0:0/ws', // note the `:0` after `0.0.0.0`
|
||||
},
|
||||
.. _`https with nginx`: https://codewithhugo.com/docker-compose-local-https/
|
|
@ -9,7 +9,7 @@ Setting Up Development Environment
|
|||
|
||||
Make sure to have the following on your host:
|
||||
|
||||
* Python 3.11
|
||||
* Python 3.12
|
||||
* PostgreSQL_.
|
||||
* Redis_, if using Celery
|
||||
* Cookiecutter_
|
||||
|
@ -18,14 +18,13 @@ First things first.
|
|||
|
||||
#. Create a virtualenv: ::
|
||||
|
||||
$ python3.11 -m venv <virtual env path>
|
||||
$ python3.12 -m venv <virtual env path>
|
||||
|
||||
#. Activate the virtualenv you have just created: ::
|
||||
|
||||
$ source <virtual env path>/bin/activate
|
||||
|
||||
#.
|
||||
.. include:: generate-project-block.rst
|
||||
#. .. include:: generate-project-block.rst
|
||||
|
||||
#. Install development requirements: ::
|
||||
|
||||
|
@ -56,8 +55,6 @@ First things first.
|
|||
#. Set the environment variables for your database(s): ::
|
||||
|
||||
$ export DATABASE_URL=postgres://postgres:<password>@127.0.0.1:5432/<DB name given to createdb>
|
||||
# Optional: set broker URL if using Celery
|
||||
$ export CELERY_BROKER_URL=redis://localhost:6379/0
|
||||
|
||||
.. note::
|
||||
|
||||
|
@ -96,6 +93,61 @@ First things first.
|
|||
.. _direnv: https://direnv.net/
|
||||
|
||||
|
||||
Creating Your First Django App
|
||||
-------------------------------
|
||||
|
||||
After setting up your environment, you're ready to add your first app. This project uses the setup from "Two Scoops of Django" with a two-tier layout:
|
||||
|
||||
- **Top Level Repository Root** has config files, documentation, `manage.py`, and more.
|
||||
- **Second Level Django Project Root** is where your Django apps live.
|
||||
- **Second Level Configuration Root** holds settings and URL configurations.
|
||||
|
||||
The project layout looks something like this: ::
|
||||
|
||||
<repository_root>/
|
||||
├── config/
|
||||
│ ├── settings/
|
||||
│ │ ├── __init__.py
|
||||
│ │ ├── base.py
|
||||
│ │ ├── local.py
|
||||
│ │ └── production.py
|
||||
│ ├── urls.py
|
||||
│ └── wsgi.py
|
||||
├── <django_project_root>/
|
||||
│ ├── <name_of_the_app>/
|
||||
│ │ ├── migrations/
|
||||
│ │ ├── admin.py
|
||||
│ │ ├── apps.py
|
||||
│ │ ├── models.py
|
||||
│ │ ├── tests.py
|
||||
│ │ └── views.py
|
||||
│ ├── __init__.py
|
||||
│ └── ...
|
||||
├── requirements/
|
||||
│ ├── base.txt
|
||||
│ ├── local.txt
|
||||
│ └── production.txt
|
||||
├── manage.py
|
||||
├── README.md
|
||||
└── ...
|
||||
|
||||
|
||||
Following this structured approach, here's how to add a new app:
|
||||
|
||||
#. **Create the app** using Django's ``startapp`` command, replacing ``<name-of-the-app>`` with your desired app name: ::
|
||||
|
||||
$ python manage.py startapp <name-of-the-app>
|
||||
|
||||
#. **Move the app** to the Django Project Root, maintaining the project's two-tier structure: ::
|
||||
|
||||
$ mv <name-of-the-app> <django_project_root>/
|
||||
|
||||
#. **Edit the app's apps.py**: change ``name = '<name-of-the-app>'`` to ``name = '<django_project_root>.<name-of-the-app>'``.
|
||||
|
||||
#. **Register the new app** by adding it to the ``LOCAL_APPS`` list in ``config/settings/base.py``, integrating it as an official component of your project.
|
||||
|
||||
|
||||
|
||||
Setup Email Backend
|
||||
-------------------
|
||||
|
||||
|
@ -164,7 +216,7 @@ The project comes with a simple task for manual testing purposes, inside `<proje
|
|||
|
||||
You can also use Django admin to queue up tasks, thanks to the `django-celerybeat`_ package.
|
||||
|
||||
.. _Getting started with Redis guide: https://redis.io/docs/getting-started/
|
||||
.. _Getting started with Redis: https://redis.io/docs/getting-started/
|
||||
.. _Celery Workers Guide: https://docs.celeryq.dev/en/stable/userguide/workers.html
|
||||
.. _django-celerybeat: https://django-celery-beat.readthedocs.io/en/latest/
|
||||
|
|
@ -12,7 +12,9 @@ Run these commands to deploy the project to Heroku:
|
|||
|
||||
heroku create --buildpack heroku/python
|
||||
|
||||
heroku addons:create heroku-postgresql:mini
|
||||
# Note: this is not a free plan
|
||||
heroku addons:create heroku-postgresql:essential-0
|
||||
|
||||
# On Windows use double quotes for the time zone, e.g.
|
||||
# heroku pg:backups schedule --at "02:00 America/Los_Angeles" DATABASE_URL
|
||||
heroku pg:backups schedule --at '02:00 America/Los_Angeles' DATABASE_URL
|
||||
|
@ -23,10 +25,6 @@ Run these commands to deploy the project to Heroku:
|
|||
# Assuming you chose Mailgun as mail service (see below for others)
|
||||
heroku addons:create mailgun:starter
|
||||
|
||||
heroku config:set PYTHONHASHSEED=random
|
||||
|
||||
heroku config:set WEB_CONCURRENCY=4
|
||||
|
||||
heroku config:set DJANGO_DEBUG=False
|
||||
heroku config:set DJANGO_SETTINGS_MODULE=config.settings.production
|
||||
heroku config:set DJANGO_SECRET_KEY="$(openssl rand -base64 64)"
|
||||
|
@ -46,7 +44,7 @@ Run these commands to deploy the project to Heroku:
|
|||
# Assign with AWS_STORAGE_BUCKET_NAME
|
||||
heroku config:set DJANGO_AWS_STORAGE_BUCKET_NAME=
|
||||
|
||||
git push heroku master
|
||||
git push heroku main
|
||||
|
||||
heroku run python manage.py createsuperuser
|
||||
|
||||
|
@ -64,7 +62,7 @@ The script above assumes that you've chose Mailgun as email service. If you want
|
|||
|
||||
.. warning::
|
||||
|
||||
.. include:: mailgun.rst
|
||||
.. include:: ../includes/mailgun.rst
|
||||
|
||||
Heroku & Docker
|
||||
+++++++++++++++
|
||||
|
@ -85,8 +83,6 @@ it's in the ``Procfile``, but is turned off by default:
|
|||
|
||||
.. code-block:: bash
|
||||
|
||||
# Set the broker URL to Redis
|
||||
heroku config:set CELERY_BROKER_URL=`heroku config:get REDIS_URL`
|
||||
# Scale dyno to 1 instance
|
||||
heroku ps:scale worker=1
|
||||
|
|
@ -37,7 +37,7 @@ Make sure your project is fully committed and pushed up to Bitbucket or Github o
|
|||
mkvirtualenv --python=/usr/bin/python3.10 my-project-name
|
||||
pip install -r requirements/production.txt # may take a few minutes
|
||||
|
||||
.. note:: We're creating the virtualenv using Python 3.10 (``--python=/usr/bin/python3.10```), although Cookiecutter Django generates a project for Python 3.11. This is because, at time of writing, PythonAnywhere only supports Python 3.10. It shouldn't be a problem, but if is, you may try changing the Python version to 3.11 and see if it works. If it does, please let us know, or even better, submit a pull request to update this section.
|
||||
.. note:: We're creating the virtualenv using Python 3.10 (``--python=/usr/bin/python3.10```), although Cookiecutter Django generates a project for Python 3.12. This is because, at time of writing, PythonAnywhere only supports Python 3.10. It shouldn't be a problem, but if it is, you may try changing the Python version to 3.12 and see if it works. If it does, please let us know, or even better, submit a pull request to update this section.
|
||||
|
||||
Setting environment variables in the console
|
||||
--------------------------------------------
|
|
@ -14,7 +14,7 @@ Prerequisites
|
|||
Understanding the Docker Compose Setup
|
||||
--------------------------------------
|
||||
|
||||
Before you begin, check out the ``production.yml`` file in the root of this project. Keep note of how it provides configuration for the following services:
|
||||
Before you begin, check out the ``docker-compose.production.yml`` file in the root of this project. Keep note of how it provides configuration for the following services:
|
||||
|
||||
* ``django``: your application running behind ``Gunicorn``;
|
||||
* ``postgres``: PostgreSQL database with the application's relational data;
|
||||
|
@ -55,7 +55,7 @@ You will probably also need to setup the Mail backend, for example by adding a `
|
|||
|
||||
.. warning::
|
||||
|
||||
.. include:: mailgun.rst
|
||||
.. include:: ../includes/mailgun.rst
|
||||
|
||||
|
||||
Optional: Use AWS IAM Role for EC2 instance
|
||||
|
@ -107,7 +107,7 @@ To solve this, you can either:
|
|||
2. create a ``.env`` file in the root of the project with just variables you need. You'll need to also define them in ``.envs/.production/.django`` (hence duplicating them).
|
||||
3. set these variables when running the build command::
|
||||
|
||||
DJANGO_AWS_S3_CUSTOM_DOMAIN=example.com docker compose -f production.yml build``.
|
||||
DJANGO_AWS_S3_CUSTOM_DOMAIN=example.com docker compose -f docker-compose.production.yml build``.
|
||||
|
||||
None of these options are ideal, we're open to suggestions on how to improve this. If you think you have one, please open an issue or a pull request.
|
||||
|
||||
|
@ -122,42 +122,42 @@ Building & Running Production Stack
|
|||
|
||||
You will need to build the stack first. To do that, run::
|
||||
|
||||
docker compose -f production.yml build
|
||||
docker compose -f docker-compose.production.yml build
|
||||
|
||||
Once this is ready, you can run it with::
|
||||
|
||||
docker compose -f production.yml up
|
||||
docker compose -f docker-compose.production.yml up
|
||||
|
||||
To run the stack and detach the containers, run::
|
||||
|
||||
docker compose -f production.yml up -d
|
||||
docker compose -f docker-compose.production.yml up -d
|
||||
|
||||
To run a migration, open up a second terminal and run::
|
||||
|
||||
docker compose -f production.yml run --rm django python manage.py migrate
|
||||
docker compose -f docker-compose.production.yml run --rm django python manage.py migrate
|
||||
|
||||
To create a superuser, run::
|
||||
|
||||
docker compose -f production.yml run --rm django python manage.py createsuperuser
|
||||
docker compose -f docker-compose.production.yml run --rm django python manage.py createsuperuser
|
||||
|
||||
If you need a shell, run::
|
||||
|
||||
docker compose -f production.yml run --rm django python manage.py shell
|
||||
docker compose -f docker-compose.production.yml run --rm django python manage.py shell
|
||||
|
||||
To check the logs out, run::
|
||||
|
||||
docker compose -f production.yml logs
|
||||
docker compose -f docker-compose.production.yml logs
|
||||
|
||||
If you want to scale your application, run::
|
||||
|
||||
docker compose -f production.yml up --scale django=4
|
||||
docker compose -f production.yml up --scale celeryworker=2
|
||||
docker compose -f docker-compose.production.yml up --scale django=4
|
||||
docker compose -f docker-compose.production.yml up --scale celeryworker=2
|
||||
|
||||
.. warning:: don't try to scale ``postgres``, ``celerybeat``, or ``traefik``.
|
||||
|
||||
To see how your containers are doing run::
|
||||
|
||||
docker compose -f production.yml ps
|
||||
docker compose -f docker-compose.production.yml ps
|
||||
|
||||
|
||||
Example: Supervisor
|
||||
|
@ -165,12 +165,12 @@ Example: Supervisor
|
|||
|
||||
Once you are ready with your initial setup, you want to make sure that your application is run by a process manager to
|
||||
survive reboots and auto restarts in case of an error. You can use the process manager you are most familiar with. All
|
||||
it needs to do is to run ``docker compose -f production.yml up`` in your projects root directory.
|
||||
it needs to do is to run ``docker compose -f docker-compose.production.yml up`` in your project's root directory.
|
||||
|
||||
If you are using ``supervisor``, you can use this file as a starting point::
|
||||
|
||||
[program:{{cookiecutter.project_slug}}]
|
||||
command=docker compose -f production.yml up
|
||||
command=docker compose -f docker-compose.production.yml up
|
||||
directory=/path/to/{{cookiecutter.project_slug}}
|
||||
redirect_stderr=true
|
||||
autostart=true
|
|
@ -1,14 +1,14 @@
|
|||
PostgreSQL Backups with Docker
|
||||
==============================
|
||||
|
||||
.. note:: For brevity it is assumed that you will be running the below commands against local environment, however, this is by no means mandatory so feel free to switch to ``production.yml`` when needed.
|
||||
.. note:: For brevity it is assumed that you will be running the below commands against local environment, however, this is by no means mandatory so feel free to switch to ``docker-compose.production.yml`` when needed.
|
||||
|
||||
|
||||
Prerequisites
|
||||
-------------
|
||||
|
||||
#. the project was generated with ``use_docker`` set to ``y``;
|
||||
#. the stack is up and running: ``docker compose -f local.yml up -d postgres``.
|
||||
#. the stack is up and running: ``docker compose -f docker-compose.local.yml up -d postgres``.
|
||||
|
||||
|
||||
Creating a Backup
|
||||
|
@ -16,7 +16,7 @@ Creating a Backup
|
|||
|
||||
To create a backup, run::
|
||||
|
||||
$ docker compose -f local.yml exec postgres backup
|
||||
$ docker compose -f docker-compose.local.yml exec postgres backup
|
||||
|
||||
Assuming your project's database is named ``my_project`` here is what you will see: ::
|
||||
|
||||
|
@ -31,7 +31,7 @@ Viewing the Existing Backups
|
|||
|
||||
To list existing backups, ::
|
||||
|
||||
$ docker compose -f local.yml exec postgres backups
|
||||
$ docker compose -f docker-compose.local.yml exec postgres backups
|
||||
|
||||
These are the sample contents of ``/backups``: ::
|
||||
|
||||
|
@ -55,9 +55,9 @@ With a single backup file copied to ``.`` that would be ::
|
|||
|
||||
$ docker cp 9c5c3f055843:/backups/backup_2018_03_13T09_05_07.sql.gz .
|
||||
|
||||
You can also get the container ID using ``docker compose -f local.yml ps -q postgres`` so if you want to automate your backups, you don't have to check the container ID manually every time. Here is the full command ::
|
||||
You can also get the container ID using ``docker compose -f docker-compose.local.yml ps -q postgres`` so if you want to automate your backups, you don't have to check the container ID manually every time. Here is the full command ::
|
||||
|
||||
$ docker cp $(docker compose -f local.yml ps -q postgres):/backups ./backups
|
||||
$ docker cp $(docker compose -f docker-compose.local.yml ps -q postgres):/backups ./backups
|
||||
|
||||
.. _`command`: https://docs.docker.com/engine/reference/commandline/cp/
|
||||
|
||||
|
@ -66,7 +66,7 @@ Restoring from the Existing Backup
|
|||
|
||||
To restore from one of the backups you have already got (take the ``backup_2018_03_13T09_05_07.sql.gz`` for example), ::
|
||||
|
||||
$ docker compose -f local.yml exec postgres restore backup_2018_03_13T09_05_07.sql.gz
|
||||
$ docker compose -f docker-compose.local.yml exec postgres restore backup_2018_03_13T09_05_07.sql.gz
|
||||
|
||||
You will see something like ::
|
||||
|
||||
|
@ -95,12 +95,33 @@ Backup to Amazon S3
|
|||
|
||||
For uploading your backups to Amazon S3 you can use the aws cli container. There is an upload command for uploading the postgres /backups directory recursively and there is a download command for downloading a specific backup. The default S3 environment variables are used. ::
|
||||
|
||||
$ docker compose -f production.yml run --rm awscli upload
|
||||
$ docker compose -f production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
|
||||
$ docker compose -f docker-compose.production.yml run --rm awscli upload
|
||||
$ docker compose -f docker-compose.production.yml run --rm awscli download backup_2018_03_13T09_05_07.sql.gz
|
||||
|
||||
Remove Backup
|
||||
----------------------------------
|
||||
|
||||
To remove backup you can use the ``rmbackup`` command. This will remove the backup from the ``/backups`` directory. ::
|
||||
|
||||
$ docker compose -f local.yml exec postgres rmbackup backup_2018_03_13T09_05_07.sql.gz
|
||||
$ docker compose -f docker-compose.local.yml exec postgres rmbackup backup_2018_03_13T09_05_07.sql.gz
|
||||
|
||||
|
||||
Upgrading PostgreSQL
|
||||
----------------------------------
|
||||
|
||||
Upgrading PostgreSQL in your project requires a series of carefully executed steps. Start by halting all containers, excluding the postgres container. Following this, create a backup and proceed to remove the outdated data volume. ::
|
||||
|
||||
$ docker compose -f docker-compose.local.yml down
|
||||
$ docker compose -f docker-compose.local.yml up -d postgres
|
||||
$ docker compose -f docker-compose.local.yml run --rm postgres backup
|
||||
$ docker compose -f docker-compose.local.yml down
|
||||
$ docker volume rm my_project_postgres_data
|
||||
|
||||
.. note:: Neglecting to remove the old data volume may lead to issues, such as the new postgres container failing to start with errors like ``FATAL: database files are incompatible with server``, and ``could not translate host name "postgres" to address: Name or service not known``.
|
||||
|
||||
To complete the upgrade, update the PostgreSQL version in the corresponding Dockerfile (e.g. ``compose/production/postgres/Dockerfile``) and build a new version of PostgreSQL. ::
|
||||
|
||||
$ docker compose -f docker-compose.local.yml build postgres
|
||||
$ docker compose -f docker-compose.local.yml up -d postgres
|
||||
$ docker compose -f docker-compose.local.yml run --rm postgres restore backup_2018_03_13T09_05_07.sql.gz
|
||||
$ docker compose -f docker-compose.local.yml up -d
|
|
@ -11,7 +11,7 @@ After you have set up to `develop locally`_, run the following command from the
|
|||
|
||||
If you set up your project to `develop locally with docker`_, run the following command: ::
|
||||
|
||||
$ docker compose -f local.yml up docs
|
||||
$ docker compose -f docker-compose.docs.yml up
|
||||
|
||||
Navigate to port 9000 on your host to see the documentation. This will be opened automatically at `localhost`_ for local, non-docker development.
|
||||
|
33
docs/4-guides/linters.rst
Normal file
33
docs/4-guides/linters.rst
Normal file
|
@ -0,0 +1,33 @@
|
|||
Linters
|
||||
=======
|
||||
|
||||
.. index:: linters
|
||||
|
||||
|
||||
ruff
|
||||
------
|
||||
|
||||
Ruff is a Python linter and code formatter, written in Rust.
|
||||
It is an aggregation of flake8, pylint, pyupgrade and many more.
|
||||
|
||||
Ruff comes with a linter (``ruff check``) and a formatter (``ruff format``).
|
||||
The linter is a wrapper around flake8, pylint, and other linters,
|
||||
and the formatter is a wrapper around black, isort, and other formatters.
|
||||
|
||||
To run ruff without modifying your files: ::
|
||||
|
||||
$ ruff format --diff .
|
||||
$ ruff check .
|
||||
|
||||
Ruff is capable of fixing most of the problems it encounters.
|
||||
Be sure you commit first before running `ruff` so you can restore to a savepoint (and amend afterwards to prevent a double commit): ::
|
||||
|
||||
$ ruff format .
|
||||
$ ruff check --fix .
|
||||
# be careful with the --unsafe-fixes option, it can break your code
|
||||
$ ruff check --fix --unsafe-fixes .
|
||||
|
||||
The config for ruff is located in pyproject.toml.
|
||||
One of the most important options is `tool.ruff.lint.select`.
|
||||
`select` determines which linters are run. For example, `DJ <https://docs.astral.sh/ruff/rules/#flake8-django-dj>`_ refers to flake8-django.
|
||||
For a full list of available linters, see `https://docs.astral.sh/ruff/rules/ <https://docs.astral.sh/ruff/rules/>`_
|
|
@ -19,7 +19,7 @@ You will get a readout of the `users` app that has already been set up with test
|
|||
|
||||
If you set up your project to `develop locally with docker`_, run the following command: ::
|
||||
|
||||
$ docker compose -f local.yml run --rm django pytest
|
||||
$ docker compose -f docker-compose.local.yml run --rm django pytest
|
||||
|
||||
Targeting particular apps for testing in ``docker`` follows a similar pattern as previously shown above.
|
||||
|
||||
|
@ -36,14 +36,14 @@ Once the tests are complete, in order to see the code coverage, run the followin
|
|||
|
||||
If you're running the project locally with Docker, use these commands instead: ::
|
||||
|
||||
$ docker compose -f local.yml run --rm django coverage run -m pytest
|
||||
$ docker compose -f local.yml run --rm django coverage report
|
||||
$ docker compose -f docker-compose.local.yml run --rm django coverage run -m pytest
|
||||
$ docker compose -f docker-compose.local.yml run --rm django coverage report
|
||||
|
||||
.. note::
|
||||
|
||||
At the root of the project folder, you will find the `pytest.ini` file. You can use this to customize_ the ``pytest`` to your liking.
|
||||
|
||||
There is also the `.coveragerc`. This is the configuration file for the ``coverage`` tool. You can find out more about `configuring`_ ``coverage``.
|
||||
The configuration for ``coverage`` can be found in ``pyproject.toml``. You can find out more about `configuring`_ ``coverage``.
|
||||
|
||||
.. seealso::
|
||||
|
|
@ -22,6 +22,6 @@ TODO
|
|||
Why doesn't this follow the layout from Two Scoops of Django?
|
||||
-------------------------------------------------------------
|
||||
|
||||
You may notice that some elements of this project do not exactly match what we describe in chapter 3 of `Two Scoops of Django 1.11`_. The reason for that is this project, amongst other things, serves as a test bed for trying out new ideas and concepts. Sometimes they work, sometimes they don't, but the end result is that it won't necessarily match precisely what is described in the book I co-authored.
|
||||
You may notice that some elements of this project do not exactly match what we describe in chapter 3 of `Two Scoops of Django 3.x`_. The reason for that is this project, amongst other things, serves as a test bed for trying out new ideas and concepts. Sometimes they work, sometimes they don't, but the end result is that it won't necessarily match precisely what is described in the book I co-authored.
|
||||
|
||||
.. _Two Scoops of Django 1.11: https://www.feldroy.com/collections/django/products/two-scoops-of-django-1-11
|
||||
.. _Two Scoops of Django 3.x: https://www.feldroy.com/two-scoops-press#two-scoops-of-django
|
|
@ -8,7 +8,7 @@ Server Error on sign-up/log-in
|
|||
|
||||
Make sure you have configured the mail backend (e.g. Mailgun) by adding the API key and sender domain
|
||||
|
||||
.. include:: mailgun.rst
|
||||
.. include:: ../includes/mailgun.rst
|
||||
|
||||
.. _docker-postgres-auth-failed:
|
||||
|
||||
|
@ -30,7 +30,7 @@ If you recreate the project multiple times with the same name, Docker would pres
|
|||
|
||||
To fix this, you can either:
|
||||
|
||||
- Clear your project-related Docker cache with ``docker compose -f local.yml down --volumes --rmi all``.
|
||||
- Clear your project-related Docker cache with ``docker compose -f docker-compose.local.yml down --volumes --rmi all``.
|
||||
- Use the Docker volume sub-commands to find volumes (`ls`_) and remove them (`rm`_).
|
||||
- Use the `prune`_ command to clear system-wide (use with care!).
|
||||
|
3
docs/6-about/contributing.md
Normal file
3
docs/6-about/contributing.md
Normal file
|
@ -0,0 +1,3 @@
|
|||
```{include} ../../CONTRIBUTING.md
|
||||
|
||||
```
|
104
docs/6-about/maintainer-guide.md
Normal file
104
docs/6-about/maintainer-guide.md
Normal file
|
@ -0,0 +1,104 @@
|
|||
# Maintainer guide
|
||||
|
||||
This document is intended for maintainers of the template.
|
||||
|
||||
## Automated updates
|
||||
|
||||
We use 2 separate services to keep our dependencies up-to-date:
|
||||
|
||||
- Dependabot, which manages updates of Python deps of the template, GitHub actions, npm packages and Docker images.
|
||||
- PyUp, which manages the Python deps for the generated project.
|
||||
|
||||
We don't use Dependabot for the generated project deps because our requirements files are templated, and Dependabot fails to parse them. PyUp is -AFAIK- the only service out there that supports having Jinja tags in the requirements file.
|
||||
|
||||
Updates for the template should be labelled as `project infrastructure` while the ones about the generated project should be labelled as `update`. This is used to work in conjunction with our changelog script (see later).
|
||||
|
||||
## Automation scripts
|
||||
|
||||
We have a few workflows which have been automated over time. They usually run using GitHub actions and might need a few small manual actions to work nicely. Some have a few limitations which we should document here.
|
||||
|
||||
### CI
|
||||
|
||||
`ci.yml`
|
||||
|
||||
The CI workflow tries to cover 2 main aspects of the template:
|
||||
|
||||
- Check all combinations to make sure that valid files are generated with no major linting issues. Issues which are fixed by an auto-formatter after generation aren't considered major, and only aim for best effort. This is under the `test` job.
|
||||
- Run more in-depth tests on a few combinations, by installing dependencies, running type checker and the test suite of the generated project. We try to cover docker (`docker` job) and non-docker (`bare` job) setups.
|
||||
|
||||
We also run the deployment checks, but we don't do much more beyond that for testing the production setup.
|
||||
|
||||
### Django issue checker
|
||||
|
||||
`django-issue-checker.yml`
|
||||
|
||||
This workflow runs daily, on schedule, and checks if there is a new major version of Django (not in the pure SemVer sense) released that we are not running, and lists our dependencies' compatibility.
|
||||
|
||||
For example, at time of writing, we use Django 4.2, but the latest version of Django is 5.0, so the workflow created a ["Django 5.0" issue](https://github.com/cookiecutter/cookiecutter-django/issues/4724) in GitHub, with a compatibility table and keeps it up to date every day.
|
||||
|
||||
#### Limitations
|
||||
|
||||
Here are a few current and past limitations of the script
|
||||
|
||||
- When a new dependency is added to the template, the script fails to update an existing issue
|
||||
- Not sure what happens when a deps is removed
|
||||
- ~~Unable to parse classifiers without minor version~~
|
||||
- ~~Creates an issue even if we are on the latest version~~
|
||||
|
||||
### Issue manager
|
||||
|
||||
`issue-manager.yml`
|
||||
|
||||
A workflow that uses [Sebastian Ramirez' issue-manager](https://github.com/tiangolo/issue-manager) to help us automate issue management. The tag line from the repo explains it well:
|
||||
|
||||
> Automatically close issues or Pull Requests that have a label, after a custom delay, if no one replies back.
|
||||
|
||||
It runs on a schedule as well as when some actions are taken on issues and pull requests.
|
||||
|
||||
We wait 10 days before closing issues, and we have a few customised reasons, which are configured in the workflow itself. The config should be fairly self-explanatory.
|
||||
|
||||
### Pre-commit auto-update
|
||||
|
||||
`pre-commit-autoupdate.yml`
|
||||
|
||||
Run daily, to do `pre-commit autoupdate` on the template as well as the generated project, and opens a pull request with the changes.
|
||||
|
||||
#### Limitations
|
||||
|
||||
- The PR is open as GitHub action which means that CI does NOT run. The documentation for create-pull-request action [explains why](https://github.com/peter-evans/create-pull-request/blob/main/docs/concepts-guidelines.md#triggering-further-workflow-runs).
|
||||
- Some hooks are also installed as local dependencies (via `requirements/local.txt`), but these are updated separately via PyUP.
|
||||
|
||||
### Update changelog
|
||||
|
||||
`update-changelog.yml`
|
||||
|
||||
Run daily at 2AM to update our changelog and create a GitHub release. This runs a custom script which:
|
||||
|
||||
- List all pull requests merged the day before
|
||||
- The release name is calendar based, so `YYYY.MM.DD`
|
||||
- For each PR:
|
||||
- Get the PR title to summarize the change
|
||||
- Look at the PR labels to classify it in a section of the release notes:
|
||||
- anything labelled `project infrastructure` is excluded
|
||||
- label `update` goes in section "Updated"
|
||||
- label `bug` goes in section "Fixed"
|
||||
- label `docs` goes in section "Documentation"
|
||||
- Default to section "Changed"
|
||||
|
||||
With that in mind, when merging changes, it's a good idea to set the labels and rename the PR title to give a good summary of the change, in the context of the changelog.
|
||||
|
||||
#### Limitations
|
||||
|
||||
- Dependabot updates for npm & Docker have a verbose title, try to rename them to be more readable: `Bump webpack-dev-server from 4.15.1 to 5.0.2 in /{{cookiecutter.project_slug}}` -> `Bump webpack-dev-server to 5.0.2`
|
||||
- ~~Dependencies updates for the template repo (tox, cookiecutter, etc...) don't need to appear in changelog, and need to be labelled as `project infrastructure` manually. By default, they come from PyUp labelled as `update`.~~
|
||||
|
||||
### Update contributors
|
||||
|
||||
`update-contributors.yml`
|
||||
|
||||
Runs on each push to master branch. List the 5 most recently merged pull requests and extract their author. If any of the authors is a new one, updates the `.github/contributors.json`, regenerate the `CONTRIBUTORS.md` from it, and push back the changes to master.
|
||||
|
||||
#### Limitations
|
||||
|
||||
- If you merge a pull request from a new contributor, and merge another one right after, the push to master will fail as the remote will be out of date.
|
||||
- If you merge more than 5 pull requests in a row like this, the new contributor might fail to be added.
|
142
docs/Makefile
142
docs/Makefile
|
@ -4,150 +4,30 @@
|
|||
# You can set these variables from the command line.
|
||||
SPHINXOPTS =
|
||||
SPHINXBUILD = sphinx-build
|
||||
PAPER =
|
||||
SOURCEDIR = .
|
||||
BUILDDIR = _build
|
||||
|
||||
# Internal variables.
|
||||
PAPEROPT_a4 = -D latex_paper_size=a4
|
||||
PAPEROPT_letter = -D latex_paper_size=letter
|
||||
ALLSPHINXOPTS = -d $(BUILDDIR)/doctrees $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
# the i18n builder cannot share the environment and doctrees with the others
|
||||
I18NSPHINXOPTS = $(PAPEROPT_$(PAPER)) $(SPHINXOPTS) .
|
||||
|
||||
.PHONY: help clean html dirhtml singlehtml pickle json htmlhelp qthelp devhelp epub latex latexpdf text man changes linkcheck doctest gettext
|
||||
.PHONY: help clean html livehtml linkcheck
|
||||
|
||||
help:
|
||||
@echo "Please use \`make <target>' where <target> is one of"
|
||||
@echo " html to make standalone HTML files"
|
||||
@echo " dirhtml to make HTML files named index.html in directories"
|
||||
@echo " singlehtml to make a single large HTML file"
|
||||
@echo " pickle to make pickle files"
|
||||
@echo " json to make JSON files"
|
||||
@echo " htmlhelp to make HTML files and a HTML help project"
|
||||
@echo " qthelp to make HTML files and a qthelp project"
|
||||
@echo " devhelp to make HTML files and a Devhelp project"
|
||||
@echo " epub to make an epub"
|
||||
@echo " latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter"
|
||||
@echo " latexpdf to make LaTeX files and run them through pdflatex"
|
||||
@echo " text to make text files"
|
||||
@echo " man to make manual pages"
|
||||
@echo " texinfo to make Texinfo files"
|
||||
@echo " info to make Texinfo files and run them through makeinfo"
|
||||
@echo " gettext to make PO message catalogs"
|
||||
@echo " changes to make an overview of all changed/added/deprecated items"
|
||||
@echo " linkcheck to check all external links for integrity"
|
||||
@echo " doctest to run all doctests embedded in the documentation (if enabled)"
|
||||
@awk '/^#/{c=substr($$0,3);next}c&&/^[[:alpha:]][[:alnum:]_-]+:/{print substr($$1,1,index($$1,":")),c}1{c=0}' $(MAKEFILE_LIST) | column -s: -t
|
||||
|
||||
# Clean the build output
|
||||
clean:
|
||||
-rm -rf $(BUILDDIR)/*
|
||||
|
||||
# Build the HTML docs
|
||||
html:
|
||||
$(SPHINXBUILD) -b html $(ALLSPHINXOPTS) $(BUILDDIR)/html
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/html."
|
||||
$(SPHINXBUILD) -b html $(SPHINXOPTS) $(SOURCEDIR) $(BUILDDIR)/html
|
||||
|
||||
dirhtml:
|
||||
$(SPHINXBUILD) -b dirhtml $(ALLSPHINXOPTS) $(BUILDDIR)/dirhtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML pages are in $(BUILDDIR)/dirhtml."
|
||||
|
||||
singlehtml:
|
||||
$(SPHINXBUILD) -b singlehtml $(ALLSPHINXOPTS) $(BUILDDIR)/singlehtml
|
||||
@echo
|
||||
@echo "Build finished. The HTML page is in $(BUILDDIR)/singlehtml."
|
||||
|
||||
pickle:
|
||||
$(SPHINXBUILD) -b pickle $(ALLSPHINXOPTS) $(BUILDDIR)/pickle
|
||||
@echo
|
||||
@echo "Build finished; now you can process the pickle files."
|
||||
|
||||
json:
|
||||
$(SPHINXBUILD) -b json $(ALLSPHINXOPTS) $(BUILDDIR)/json
|
||||
@echo
|
||||
@echo "Build finished; now you can process the JSON files."
|
||||
|
||||
htmlhelp:
|
||||
$(SPHINXBUILD) -b htmlhelp $(ALLSPHINXOPTS) $(BUILDDIR)/htmlhelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run HTML Help Workshop with the" \
|
||||
".hhp project file in $(BUILDDIR)/htmlhelp."
|
||||
|
||||
qthelp:
|
||||
$(SPHINXBUILD) -b qthelp $(ALLSPHINXOPTS) $(BUILDDIR)/qthelp
|
||||
@echo
|
||||
@echo "Build finished; now you can run "qcollectiongenerator" with the" \
|
||||
".qhcp project file in $(BUILDDIR)/qthelp, like this:"
|
||||
@echo "# qcollectiongenerator $(BUILDDIR)/qthelp/{{ cookiecutter.project_slug }}.qhcp"
|
||||
@echo "To view the help file:"
|
||||
@echo "# assistant -collectionFile $(BUILDDIR)/qthelp/{{ cookiecutter.project_slug }}.qhc"
|
||||
|
||||
devhelp:
|
||||
$(SPHINXBUILD) -b devhelp $(ALLSPHINXOPTS) $(BUILDDIR)/devhelp
|
||||
@echo
|
||||
@echo "Build finished."
|
||||
@echo "To view the help file:"
|
||||
@echo "# mkdir -p $$HOME/.local/share/devhelp/{{ cookiecutter.project_slug }}"
|
||||
@echo "# ln -s $(BUILDDIR)/devhelp $$HOME/.local/share/devhelp/{{ cookiecutter.project_slug }}"
|
||||
@echo "# devhelp"
|
||||
|
||||
epub:
|
||||
$(SPHINXBUILD) -b epub $(ALLSPHINXOPTS) $(BUILDDIR)/epub
|
||||
@echo
|
||||
@echo "Build finished. The epub file is in $(BUILDDIR)/epub."
|
||||
|
||||
latex:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo
|
||||
@echo "Build finished; the LaTeX files are in $(BUILDDIR)/latex."
|
||||
@echo "Run \`make' in that directory to run these through (pdf)latex" \
|
||||
"(use \`make latexpdf' here to do that automatically)."
|
||||
|
||||
latexpdf:
|
||||
$(SPHINXBUILD) -b latex $(ALLSPHINXOPTS) $(BUILDDIR)/latex
|
||||
@echo "Running LaTeX files through pdflatex..."
|
||||
$(MAKE) -C $(BUILDDIR)/latex all-pdf
|
||||
@echo "pdflatex finished; the PDF files are in $(BUILDDIR)/latex."
|
||||
|
||||
text:
|
||||
$(SPHINXBUILD) -b text $(ALLSPHINXOPTS) $(BUILDDIR)/text
|
||||
@echo
|
||||
@echo "Build finished. The text files are in $(BUILDDIR)/text."
|
||||
|
||||
man:
|
||||
$(SPHINXBUILD) -b man $(ALLSPHINXOPTS) $(BUILDDIR)/man
|
||||
@echo
|
||||
@echo "Build finished. The manual pages are in $(BUILDDIR)/man."
|
||||
|
||||
texinfo:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo
|
||||
@echo "Build finished. The Texinfo files are in $(BUILDDIR)/texinfo."
|
||||
@echo "Run \`make' in that directory to run these through makeinfo" \
|
||||
"(use \`make info' here to do that automatically)."
|
||||
|
||||
info:
|
||||
$(SPHINXBUILD) -b texinfo $(ALLSPHINXOPTS) $(BUILDDIR)/texinfo
|
||||
@echo "Running Texinfo files through makeinfo..."
|
||||
make -C $(BUILDDIR)/texinfo info
|
||||
@echo "makeinfo finished; the Info files are in $(BUILDDIR)/texinfo."
|
||||
|
||||
gettext:
|
||||
$(SPHINXBUILD) -b gettext $(I18NSPHINXOPTS) $(BUILDDIR)/locale
|
||||
@echo
|
||||
@echo "Build finished. The message catalogs are in $(BUILDDIR)/locale."
|
||||
|
||||
changes:
|
||||
$(SPHINXBUILD) -b changes $(ALLSPHINXOPTS) $(BUILDDIR)/changes
|
||||
@echo
|
||||
@echo "The overview file is in $(BUILDDIR)/changes."
|
||||
# Build and serve docs with live reload
|
||||
livehtml:
|
||||
sphinx-autobuild -b html --port 9000 --watch . -c . $(SOURCEDIR) $(BUILDDIR)/html
|
||||
|
||||
# Check all external links for integrity
|
||||
linkcheck:
|
||||
$(SPHINXBUILD) -b linkcheck $(ALLSPHINXOPTS) $(BUILDDIR)/linkcheck
|
||||
$(SPHINXBUILD) -b linkcheck $(SPHINXOPTS) $(SOURCEDIR) $(BUILDDIR)/linkcheck
|
||||
@echo
|
||||
@echo "Link check complete; look for any errors in the above output " \
|
||||
"or in $(BUILDDIR)/linkcheck/output.txt."
|
||||
|
||||
doctest:
|
||||
$(SPHINXBUILD) -b doctest $(ALLSPHINXOPTS) $(BUILDDIR)/doctest
|
||||
@echo "Testing of doctests in the sources finished, look at the " \
|
||||
"results in $(BUILDDIR)/doctest/output.txt."
|
||||
|
|
11
docs/conf.py
11
docs/conf.py
|
@ -29,7 +29,10 @@ extensions = ["myst_parser"]
|
|||
templates_path = ["_templates"]
|
||||
|
||||
# The suffix of source filenames.
|
||||
source_suffix = [".rst", ".md"]
|
||||
source_suffix = {
|
||||
".rst": "restructuredtext",
|
||||
".md": "markdown",
|
||||
}
|
||||
|
||||
# The encoding of source files.
|
||||
# source_encoding = 'utf-8-sig'
|
||||
|
@ -185,7 +188,7 @@ latex_documents = [
|
|||
"cookiecutter-django Documentation",
|
||||
"cookiecutter-django",
|
||||
"manual",
|
||||
)
|
||||
),
|
||||
]
|
||||
|
||||
# The name of an image file (relative to this directory) to place at the top of
|
||||
|
@ -220,7 +223,7 @@ man_pages = [
|
|||
"Cookiecutter Django documentation",
|
||||
["Daniel Roy Greenfeld"],
|
||||
1,
|
||||
)
|
||||
),
|
||||
]
|
||||
|
||||
# If true, show URL addresses after external links.
|
||||
|
@ -239,7 +242,7 @@ texinfo_documents = [
|
|||
"Cookiecutter Django documentation",
|
||||
"Daniel Roy Greenfeld",
|
||||
"Cookiecutter Django",
|
||||
"A Cookiecutter template for creating production-ready " "Django projects quickly.",
|
||||
"A Cookiecutter template for creating production-ready Django projects quickly.",
|
||||
"Miscellaneous",
|
||||
)
|
||||
]
|
||||
|
|
|
@ -1,3 +0,0 @@
|
|||
```{include} ../CONTRIBUTING.md
|
||||
|
||||
```
|
|
@ -7,27 +7,51 @@ Powered by Cookiecutter_, Cookiecutter Django is a project template for jumpstar
|
|||
|
||||
.. _cookiecutter: https://github.com/cookiecutter/cookiecutter
|
||||
|
||||
Contents
|
||||
--------
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Getting Started
|
||||
|
||||
1-getting-started/project-generation-options
|
||||
1-getting-started/settings
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Local Development
|
||||
|
||||
project-generation-options
|
||||
developing-locally
|
||||
developing-locally-docker
|
||||
settings
|
||||
linters
|
||||
testing
|
||||
document
|
||||
deployment-on-pythonanywhere
|
||||
deployment-on-heroku
|
||||
deployment-with-docker
|
||||
docker-postgres-backups
|
||||
websocket
|
||||
faq
|
||||
troubleshooting
|
||||
contributing
|
||||
2-local-development/developing-locally
|
||||
2-local-development/developing-locally-docker
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Deployment
|
||||
|
||||
3-deployment/deployment-on-pythonanywhere
|
||||
3-deployment/deployment-on-heroku
|
||||
3-deployment/deployment-with-docker
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Guides
|
||||
|
||||
4-guides/docker-postgres-backups
|
||||
4-guides/linters
|
||||
4-guides/testing
|
||||
4-guides/document
|
||||
4-guides/websocket
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: Help
|
||||
|
||||
5-help/faq
|
||||
5-help/troubleshooting
|
||||
|
||||
.. toctree::
|
||||
:maxdepth: 2
|
||||
:caption: About
|
||||
|
||||
6-about/contributing
|
||||
6-about/maintainer-guide
|
||||
|
||||
Indices and tables
|
||||
------------------
|
||||
|
|
|
@ -1,43 +0,0 @@
|
|||
Linters
|
||||
=======
|
||||
|
||||
.. index:: linters
|
||||
|
||||
|
||||
flake8
|
||||
------
|
||||
|
||||
To run flake8: ::
|
||||
|
||||
$ flake8
|
||||
|
||||
The config for flake8 is located in setup.cfg. It specifies:
|
||||
|
||||
* Set max line length to 120 chars
|
||||
* Exclude ``.tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules``
|
||||
|
||||
pylint
|
||||
------
|
||||
|
||||
To run pylint: ::
|
||||
|
||||
$ pylint <python files that you wish to lint>
|
||||
|
||||
The config for pylint is located in .pylintrc. It specifies:
|
||||
|
||||
* Use the pylint_django plugin. If using Celery, also use pylint_celery.
|
||||
* Set max line length to 120 chars
|
||||
* Disable linting messages for missing docstring and invalid name
|
||||
* max-parents=13
|
||||
|
||||
pycodestyle
|
||||
-----------
|
||||
|
||||
This is included in flake8's checks, but you can also run it separately to see a more detailed report: ::
|
||||
|
||||
$ pycodestyle <python files that you wish to lint>
|
||||
|
||||
The config for pycodestyle is located in setup.cfg. It specifies:
|
||||
|
||||
* Set max line length to 120 chars
|
||||
* Exclude ``.tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules``
|
153
docs/make.bat
153
docs/make.bat
|
@ -5,36 +5,20 @@ REM Command file for Sphinx documentation
|
|||
if "%SPHINXBUILD%" == "" (
|
||||
set SPHINXBUILD=sphinx-build
|
||||
)
|
||||
set SOURCEDIR=.
|
||||
set BUILDDIR=_build
|
||||
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS% .
|
||||
set ALLSPHINXOPTS=-d %BUILDDIR%/doctrees %SPHINXOPTS%
|
||||
set I18NSPHINXOPTS=%SPHINXOPTS% .
|
||||
if NOT "%PAPER%" == "" (
|
||||
set ALLSPHINXOPTS=-D latex_paper_size=%PAPER% %ALLSPHINXOPTS%
|
||||
set I18NSPHINXOPTS=-D latex_paper_size=%PAPER% %I18NSPHINXOPTS%
|
||||
)
|
||||
|
||||
if "%1" == "" goto help
|
||||
|
||||
if "%1" == "help" (
|
||||
:help
|
||||
echo.Please use `make ^<target^>` where ^<target^> is one of
|
||||
echo. clean to clean the build directory
|
||||
echo. html to make standalone HTML files
|
||||
echo. dirhtml to make HTML files named index.html in directories
|
||||
echo. singlehtml to make a single large HTML file
|
||||
echo. pickle to make pickle files
|
||||
echo. json to make JSON files
|
||||
echo. htmlhelp to make HTML files and a HTML help project
|
||||
echo. qthelp to make HTML files and a qthelp project
|
||||
echo. devhelp to make HTML files and a Devhelp project
|
||||
echo. epub to make an epub
|
||||
echo. latex to make LaTeX files, you can set PAPER=a4 or PAPER=letter
|
||||
echo. text to make text files
|
||||
echo. man to make manual pages
|
||||
echo. texinfo to make Texinfo files
|
||||
echo. gettext to make PO message catalogs
|
||||
echo. changes to make an overview over all changed/added/deprecated items
|
||||
echo. livehtml to build and serve docs with live reload
|
||||
echo. linkcheck to check all external links for integrity
|
||||
echo. doctest to run all doctests embedded in the documentation if enabled
|
||||
goto end
|
||||
)
|
||||
|
||||
|
@ -45,132 +29,22 @@ if "%1" == "clean" (
|
|||
)
|
||||
|
||||
if "%1" == "html" (
|
||||
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
|
||||
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %SOURCEDIR% %BUILDDIR%/html
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/html.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "dirhtml" (
|
||||
%SPHINXBUILD% -b dirhtml %ALLSPHINXOPTS% %BUILDDIR%/dirhtml
|
||||
if "%1" == "livehtml" (
|
||||
sphinx-autobuild -b html --port 9000 --watch . -c . %SOURCEDIR% %BUILDDIR%/html
|
||||
%SPHINXBUILD% -b html %ALLSPHINXOPTS% %BUILDDIR%/html
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/dirhtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "singlehtml" (
|
||||
%SPHINXBUILD% -b singlehtml %ALLSPHINXOPTS% %BUILDDIR%/singlehtml
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The HTML pages are in %BUILDDIR%/singlehtml.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "pickle" (
|
||||
%SPHINXBUILD% -b pickle %ALLSPHINXOPTS% %BUILDDIR%/pickle
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the pickle files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "json" (
|
||||
%SPHINXBUILD% -b json %ALLSPHINXOPTS% %BUILDDIR%/json
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can process the JSON files.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "htmlhelp" (
|
||||
%SPHINXBUILD% -b htmlhelp %ALLSPHINXOPTS% %BUILDDIR%/htmlhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run HTML Help Workshop with the ^
|
||||
.hhp project file in %BUILDDIR%/htmlhelp.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "qthelp" (
|
||||
%SPHINXBUILD% -b qthelp %ALLSPHINXOPTS% %BUILDDIR%/qthelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; now you can run "qcollectiongenerator" with the ^
|
||||
.qhcp project file in %BUILDDIR%/qthelp, like this:
|
||||
echo.^> qcollectiongenerator %BUILDDIR%\qthelp\{{ cookiecutter.project_slug }}.qhcp
|
||||
echo.To view the help file:
|
||||
echo.^> assistant -collectionFile %BUILDDIR%\qthelp\{{ cookiecutter.project_slug }}.ghc
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "devhelp" (
|
||||
%SPHINXBUILD% -b devhelp %ALLSPHINXOPTS% %BUILDDIR%/devhelp
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "epub" (
|
||||
%SPHINXBUILD% -b epub %ALLSPHINXOPTS% %BUILDDIR%/epub
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The epub file is in %BUILDDIR%/epub.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "latex" (
|
||||
%SPHINXBUILD% -b latex %ALLSPHINXOPTS% %BUILDDIR%/latex
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished; the LaTeX files are in %BUILDDIR%/latex.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "text" (
|
||||
%SPHINXBUILD% -b text %ALLSPHINXOPTS% %BUILDDIR%/text
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The text files are in %BUILDDIR%/text.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "man" (
|
||||
%SPHINXBUILD% -b man %ALLSPHINXOPTS% %BUILDDIR%/man
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The manual pages are in %BUILDDIR%/man.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "texinfo" (
|
||||
%SPHINXBUILD% -b texinfo %ALLSPHINXOPTS% %BUILDDIR%/texinfo
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The Texinfo files are in %BUILDDIR%/texinfo.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "gettext" (
|
||||
%SPHINXBUILD% -b gettext %I18NSPHINXOPTS% %BUILDDIR%/locale
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Build finished. The message catalogs are in %BUILDDIR%/locale.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "changes" (
|
||||
%SPHINXBUILD% -b changes %ALLSPHINXOPTS% %BUILDDIR%/changes
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.The overview file is in %BUILDDIR%/changes.
|
||||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "linkcheck" (
|
||||
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %BUILDDIR%/linkcheck
|
||||
%SPHINXBUILD% -b linkcheck %ALLSPHINXOPTS% %SOURCEDIR% %BUILDDIR%/linkcheck
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Link check complete; look for any errors in the above output ^
|
||||
|
@ -178,13 +52,4 @@ or in %BUILDDIR%/linkcheck/output.txt.
|
|||
goto end
|
||||
)
|
||||
|
||||
if "%1" == "doctest" (
|
||||
%SPHINXBUILD% -b doctest %ALLSPHINXOPTS% %BUILDDIR%/doctest
|
||||
if errorlevel 1 exit /b 1
|
||||
echo.
|
||||
echo.Testing of doctests in the sources finished, look at the ^
|
||||
results in %BUILDDIR%/doctest/output.txt.
|
||||
goto end
|
||||
)
|
||||
|
||||
:end
|
||||
|
|
|
@ -1,3 +0,0 @@
|
|||
sphinx==7.2.6
|
||||
sphinx-rtd-theme==2.0.0
|
||||
myst-parser==2.0.0
|
|
@ -1,20 +1,8 @@
|
|||
"""
|
||||
NOTE:
|
||||
the below code is to be maintained Python 2.x-compatible
|
||||
as the whole Cookiecutter Django project initialization
|
||||
can potentially be run in Python 2.x environment
|
||||
(at least so we presume in `pre_gen_project.py`).
|
||||
|
||||
TODO: restrict Cookiecutter Django project initialization to
|
||||
Python 3.x environments only
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import json
|
||||
import os
|
||||
import random
|
||||
import shutil
|
||||
import string
|
||||
from pathlib import Path
|
||||
|
||||
try:
|
||||
# Inspired by
|
||||
|
@ -36,40 +24,28 @@ DEBUG_VALUE = "debug"
|
|||
def remove_open_source_files():
|
||||
file_names = ["CONTRIBUTORS.txt", "LICENSE"]
|
||||
for file_name in file_names:
|
||||
os.remove(file_name)
|
||||
Path(file_name).unlink()
|
||||
|
||||
|
||||
def remove_gplv3_files():
|
||||
file_names = ["COPYING"]
|
||||
for file_name in file_names:
|
||||
os.remove(file_name)
|
||||
Path(file_name).unlink()
|
||||
|
||||
|
||||
def remove_custom_user_manager_files():
|
||||
os.remove(
|
||||
os.path.join(
|
||||
"{{cookiecutter.project_slug}}",
|
||||
"users",
|
||||
"managers.py",
|
||||
)
|
||||
)
|
||||
os.remove(
|
||||
os.path.join(
|
||||
"{{cookiecutter.project_slug}}",
|
||||
"users",
|
||||
"tests",
|
||||
"test_managers.py",
|
||||
)
|
||||
)
|
||||
users_path = Path("{{cookiecutter.project_slug}}", "users")
|
||||
(users_path / "managers.py").unlink()
|
||||
(users_path / "tests" / "test_managers.py").unlink()
|
||||
|
||||
|
||||
def remove_pycharm_files():
|
||||
idea_dir_path = ".idea"
|
||||
if os.path.exists(idea_dir_path):
|
||||
idea_dir_path = Path(".idea")
|
||||
if idea_dir_path.exists():
|
||||
shutil.rmtree(idea_dir_path)
|
||||
|
||||
docs_dir_path = os.path.join("docs", "pycharm")
|
||||
if os.path.exists(docs_dir_path):
|
||||
docs_dir_path = Path("docs", "pycharm")
|
||||
if docs_dir_path.exists():
|
||||
shutil.rmtree(docs_dir_path)
|
||||
|
||||
|
||||
|
@ -77,13 +53,22 @@ def remove_docker_files():
|
|||
shutil.rmtree(".devcontainer")
|
||||
shutil.rmtree("compose")
|
||||
|
||||
file_names = ["local.yml", "production.yml", ".dockerignore"]
|
||||
file_names = [
|
||||
"docker-compose.local.yml",
|
||||
"docker-compose.production.yml",
|
||||
".dockerignore",
|
||||
"justfile",
|
||||
]
|
||||
for file_name in file_names:
|
||||
os.remove(file_name)
|
||||
Path(file_name).unlink()
|
||||
if "{{ cookiecutter.editor }}" == "PyCharm":
|
||||
file_names = ["docker_compose_up_django.xml", "docker_compose_up_docs.xml"]
|
||||
for file_name in file_names:
|
||||
os.remove(os.path.join(".idea", "runConfigurations", file_name))
|
||||
Path(".idea", "runConfigurations", file_name).unlink()
|
||||
|
||||
|
||||
def remove_nginx_docker_files():
|
||||
shutil.rmtree(Path("compose", "production", "nginx"))
|
||||
|
||||
|
||||
def remove_utility_files():
|
||||
|
@ -91,23 +76,23 @@ def remove_utility_files():
|
|||
|
||||
|
||||
def remove_heroku_files():
|
||||
file_names = ["Procfile", "runtime.txt", "requirements.txt"]
|
||||
file_names = ["Procfile", "requirements.txt"]
|
||||
for file_name in file_names:
|
||||
if file_name == "requirements.txt" and "{{ cookiecutter.ci_tool }}".lower() == "travis":
|
||||
# don't remove the file if we are using travisci but not using heroku
|
||||
continue
|
||||
os.remove(file_name)
|
||||
Path(file_name).unlink()
|
||||
shutil.rmtree("bin")
|
||||
|
||||
|
||||
def remove_sass_files():
|
||||
shutil.rmtree(os.path.join("{{cookiecutter.project_slug}}", "static", "sass"))
|
||||
shutil.rmtree(Path("{{cookiecutter.project_slug}}", "static", "sass"))
|
||||
|
||||
|
||||
def remove_gulp_files():
|
||||
file_names = ["gulpfile.js"]
|
||||
file_names = ["gulpfile.mjs"]
|
||||
for file_name in file_names:
|
||||
os.remove(file_name)
|
||||
Path(file_name).unlink()
|
||||
|
||||
|
||||
def remove_webpack_files():
|
||||
|
@ -116,36 +101,30 @@ def remove_webpack_files():
|
|||
|
||||
|
||||
def remove_vendors_js():
|
||||
vendors_js_path = os.path.join(
|
||||
"{{ cookiecutter.project_slug }}",
|
||||
"static",
|
||||
"js",
|
||||
"vendors.js",
|
||||
)
|
||||
if os.path.exists(vendors_js_path):
|
||||
os.remove(vendors_js_path)
|
||||
vendors_js_path = Path("{{ cookiecutter.project_slug }}", "static", "js", "vendors.js")
|
||||
if vendors_js_path.exists():
|
||||
vendors_js_path.unlink()
|
||||
|
||||
|
||||
def remove_packagejson_file():
|
||||
file_names = ["package.json"]
|
||||
for file_name in file_names:
|
||||
os.remove(file_name)
|
||||
Path(file_name).unlink()
|
||||
|
||||
|
||||
def update_package_json(remove_dev_deps=None, remove_keys=None, scripts=None):
|
||||
remove_dev_deps = remove_dev_deps or []
|
||||
remove_keys = remove_keys or []
|
||||
scripts = scripts or {}
|
||||
with open("package.json", mode="r") as fd:
|
||||
content = json.load(fd)
|
||||
package_json = Path("package.json")
|
||||
content = json.loads(package_json.read_text())
|
||||
for package_name in remove_dev_deps:
|
||||
content["devDependencies"].pop(package_name)
|
||||
for key in remove_keys:
|
||||
content.pop(key)
|
||||
content["scripts"].update(scripts)
|
||||
with open("package.json", mode="w") as fd:
|
||||
json.dump(content, fd, ensure_ascii=False, indent=2)
|
||||
fd.write("\n")
|
||||
updated_content = json.dumps(content, ensure_ascii=False, indent=2) + "\n"
|
||||
package_json.write_text(updated_content)
|
||||
|
||||
|
||||
def handle_js_runner(choice, use_docker, use_async):
|
||||
|
@ -170,7 +149,7 @@ def handle_js_runner(choice, use_docker, use_async):
|
|||
remove_keys=["babel"],
|
||||
scripts={
|
||||
"dev": "gulp",
|
||||
"build": "gulp generate-assets",
|
||||
"build": "gulp build",
|
||||
},
|
||||
)
|
||||
remove_webpack_files()
|
||||
|
@ -209,8 +188,8 @@ def handle_js_runner(choice, use_docker, use_async):
|
|||
|
||||
|
||||
def remove_prettier_pre_commit():
|
||||
with open(".pre-commit-config.yaml", "r") as fd:
|
||||
content = fd.readlines()
|
||||
pre_commit_yaml = Path(".pre-commit-config.yaml")
|
||||
content = pre_commit_yaml.read_text().splitlines()
|
||||
|
||||
removing = False
|
||||
new_lines = []
|
||||
|
@ -222,35 +201,34 @@ def remove_prettier_pre_commit():
|
|||
if not removing:
|
||||
new_lines.append(line)
|
||||
|
||||
with open(".pre-commit-config.yaml", "w") as fd:
|
||||
fd.writelines(new_lines)
|
||||
pre_commit_yaml.write_text("\n".join(new_lines))
|
||||
|
||||
|
||||
def remove_celery_files():
|
||||
file_names = [
|
||||
os.path.join("config", "celery_app.py"),
|
||||
os.path.join("{{ cookiecutter.project_slug }}", "users", "tasks.py"),
|
||||
os.path.join("{{ cookiecutter.project_slug }}", "users", "tests", "test_tasks.py"),
|
||||
file_paths = [
|
||||
Path("config", "celery_app.py"),
|
||||
Path("{{ cookiecutter.project_slug }}", "users", "tasks.py"),
|
||||
Path("{{ cookiecutter.project_slug }}", "users", "tests", "test_tasks.py"),
|
||||
]
|
||||
for file_name in file_names:
|
||||
os.remove(file_name)
|
||||
for file_path in file_paths:
|
||||
file_path.unlink()
|
||||
|
||||
|
||||
def remove_async_files():
|
||||
file_names = [
|
||||
os.path.join("config", "asgi.py"),
|
||||
os.path.join("config", "websocket.py"),
|
||||
file_paths = [
|
||||
Path("config", "asgi.py"),
|
||||
Path("config", "websocket.py"),
|
||||
]
|
||||
for file_name in file_names:
|
||||
os.remove(file_name)
|
||||
for file_path in file_paths:
|
||||
file_path.unlink()
|
||||
|
||||
|
||||
def remove_dottravisyml_file():
|
||||
os.remove(".travis.yml")
|
||||
Path(".travis.yml").unlink()
|
||||
|
||||
|
||||
def remove_dotgitlabciyml_file():
|
||||
os.remove(".gitlab-ci.yml")
|
||||
Path(".gitlab-ci.yml").unlink()
|
||||
|
||||
|
||||
def remove_dotgithub_folder():
|
||||
|
@ -258,7 +236,7 @@ def remove_dotgithub_folder():
|
|||
|
||||
|
||||
def remove_dotdrone_file():
|
||||
os.remove(".drone.yml")
|
||||
Path(".drone.yml").unlink()
|
||||
|
||||
|
||||
def generate_random_string(length, using_digits=False, using_ascii_letters=False, using_punctuation=False):
|
||||
|
@ -284,7 +262,7 @@ def generate_random_string(length, using_digits=False, using_ascii_letters=False
|
|||
return "".join([random.choice(symbols) for _ in range(length)])
|
||||
|
||||
|
||||
def set_flag(file_path, flag, value=None, formatted=None, *args, **kwargs):
|
||||
def set_flag(file_path: Path, flag, value=None, formatted=None, *args, **kwargs):
|
||||
if value is None:
|
||||
random_string = generate_random_string(*args, **kwargs)
|
||||
if random_string is None:
|
||||
|
@ -297,7 +275,7 @@ def set_flag(file_path, flag, value=None, formatted=None, *args, **kwargs):
|
|||
random_string = formatted.format(random_string)
|
||||
value = random_string
|
||||
|
||||
with open(file_path, "r+") as f:
|
||||
with file_path.open("r+") as f:
|
||||
file_contents = f.read().replace(flag, value)
|
||||
f.seek(0)
|
||||
f.write(file_contents)
|
||||
|
@ -306,7 +284,7 @@ def set_flag(file_path, flag, value=None, formatted=None, *args, **kwargs):
|
|||
return value
|
||||
|
||||
|
||||
def set_django_secret_key(file_path):
|
||||
def set_django_secret_key(file_path: Path):
|
||||
django_secret_key = set_flag(
|
||||
file_path,
|
||||
"!!!SET DJANGO_SECRET_KEY!!!",
|
||||
|
@ -317,7 +295,7 @@ def set_django_secret_key(file_path):
|
|||
return django_secret_key
|
||||
|
||||
|
||||
def set_django_admin_url(file_path):
|
||||
def set_django_admin_url(file_path: Path):
|
||||
django_admin_url = set_flag(
|
||||
file_path,
|
||||
"!!!SET DJANGO_ADMIN_URL!!!",
|
||||
|
@ -372,16 +350,16 @@ def set_celery_flower_password(file_path, value=None):
|
|||
|
||||
|
||||
def append_to_gitignore_file(ignored_line):
|
||||
with open(".gitignore", "a") as gitignore_file:
|
||||
with Path(".gitignore").open("a") as gitignore_file:
|
||||
gitignore_file.write(ignored_line)
|
||||
gitignore_file.write("\n")
|
||||
|
||||
|
||||
def set_flags_in_envs(postgres_user, celery_flower_user, debug=False):
|
||||
local_django_envs_path = os.path.join(".envs", ".local", ".django")
|
||||
production_django_envs_path = os.path.join(".envs", ".production", ".django")
|
||||
local_postgres_envs_path = os.path.join(".envs", ".local", ".postgres")
|
||||
production_postgres_envs_path = os.path.join(".envs", ".production", ".postgres")
|
||||
local_django_envs_path = Path(".envs", ".local", ".django")
|
||||
production_django_envs_path = Path(".envs", ".production", ".django")
|
||||
local_postgres_envs_path = Path(".envs", ".local", ".postgres")
|
||||
production_postgres_envs_path = Path(".envs", ".production", ".postgres")
|
||||
|
||||
set_django_secret_key(production_django_envs_path)
|
||||
set_django_admin_url(production_django_envs_path)
|
||||
|
@ -398,39 +376,33 @@ def set_flags_in_envs(postgres_user, celery_flower_user, debug=False):
|
|||
|
||||
|
||||
def set_flags_in_settings_files():
|
||||
set_django_secret_key(os.path.join("config", "settings", "local.py"))
|
||||
set_django_secret_key(os.path.join("config", "settings", "test.py"))
|
||||
set_django_secret_key(Path("config", "settings", "local.py"))
|
||||
set_django_secret_key(Path("config", "settings", "test.py"))
|
||||
|
||||
|
||||
def remove_envs_and_associated_files():
|
||||
shutil.rmtree(".envs")
|
||||
os.remove("merge_production_dotenvs_in_dotenv.py")
|
||||
Path("merge_production_dotenvs_in_dotenv.py").unlink()
|
||||
shutil.rmtree("tests")
|
||||
|
||||
|
||||
def remove_celery_compose_dirs():
|
||||
shutil.rmtree(os.path.join("compose", "local", "django", "celery"))
|
||||
shutil.rmtree(os.path.join("compose", "production", "django", "celery"))
|
||||
shutil.rmtree(Path("compose", "local", "django", "celery"))
|
||||
shutil.rmtree(Path("compose", "production", "django", "celery"))
|
||||
|
||||
|
||||
def remove_node_dockerfile():
|
||||
shutil.rmtree(os.path.join("compose", "local", "node"))
|
||||
shutil.rmtree(Path("compose", "local", "node"))
|
||||
|
||||
|
||||
def remove_aws_dockerfile():
|
||||
shutil.rmtree(os.path.join("compose", "production", "aws"))
|
||||
shutil.rmtree(Path("compose", "production", "aws"))
|
||||
|
||||
|
||||
def remove_drf_starter_files():
|
||||
os.remove(os.path.join("config", "api_router.py"))
|
||||
shutil.rmtree(os.path.join("{{cookiecutter.project_slug}}", "users", "api"))
|
||||
os.remove(os.path.join("{{cookiecutter.project_slug}}", "users", "tests", "test_drf_urls.py"))
|
||||
os.remove(os.path.join("{{cookiecutter.project_slug}}", "users", "tests", "test_drf_views.py"))
|
||||
os.remove(os.path.join("{{cookiecutter.project_slug}}", "users", "tests", "test_swagger.py"))
|
||||
|
||||
|
||||
def remove_storages_module():
|
||||
os.remove(os.path.join("{{cookiecutter.project_slug}}", "utils", "storages.py"))
|
||||
Path("config", "api_router.py").unlink()
|
||||
shutil.rmtree(Path("{{cookiecutter.project_slug}}", "users", "api"))
|
||||
shutil.rmtree(Path("{{cookiecutter.project_slug}}", "users", "tests", "api"))
|
||||
|
||||
|
||||
def main():
|
||||
|
@ -456,6 +428,8 @@ def main():
|
|||
|
||||
if "{{ cookiecutter.use_docker }}".lower() == "y":
|
||||
remove_utility_files()
|
||||
if "{{ cookiecutter.cloud_provider }}".lower() != "none":
|
||||
remove_nginx_docker_files()
|
||||
else:
|
||||
remove_docker_files()
|
||||
|
||||
|
@ -499,7 +473,6 @@ def main():
|
|||
WARNING + "You chose to not use any cloud providers nor Docker, "
|
||||
"media files won't be served in production." + TERMINATOR
|
||||
)
|
||||
remove_storages_module()
|
||||
|
||||
if "{{ cookiecutter.use_celery }}".lower() == "n":
|
||||
remove_celery_files()
|
||||
|
|
|
@ -1,14 +1,3 @@
|
|||
"""
|
||||
NOTE:
|
||||
the below code is to be maintained Python 2.x-compatible
|
||||
as the whole Cookiecutter Django project initialization
|
||||
can potentially be run in Python 2.x environment.
|
||||
|
||||
TODO: restrict Cookiecutter Django project initialization
|
||||
to Python 3.x environments only
|
||||
"""
|
||||
from __future__ import print_function
|
||||
|
||||
import sys
|
||||
|
||||
TERMINATOR = "\x1b[0m"
|
||||
|
@ -33,36 +22,10 @@ assert project_slug == project_slug.lower(), "'{}' project slug should be all lo
|
|||
|
||||
assert "\\" not in "{{ cookiecutter.author_name }}", "Don't include backslashes in author name."
|
||||
|
||||
if "{{ cookiecutter.use_docker }}".lower() == "n":
|
||||
python_major_version = sys.version_info[0]
|
||||
if python_major_version == 2:
|
||||
print(
|
||||
WARNING + "You're running cookiecutter under Python 2, but the generated "
|
||||
"project requires Python 3.11+. Do you want to proceed (y/n)? " + TERMINATOR
|
||||
)
|
||||
yes_options, no_options = frozenset(["y"]), frozenset(["n"])
|
||||
while True:
|
||||
choice = raw_input().lower() # noqa: F821
|
||||
if choice in yes_options:
|
||||
break
|
||||
|
||||
elif choice in no_options:
|
||||
print(INFO + "Generation process stopped as requested." + TERMINATOR)
|
||||
sys.exit(1)
|
||||
else:
|
||||
print(
|
||||
HINT
|
||||
+ "Please respond with {} or {}: ".format(
|
||||
", ".join(["'{}'".format(o) for o in yes_options if not o == ""]),
|
||||
", ".join(["'{}'".format(o) for o in no_options if not o == ""]),
|
||||
)
|
||||
+ TERMINATOR
|
||||
)
|
||||
|
||||
if "{{ cookiecutter.use_whitenoise }}".lower() == "n" and "{{ cookiecutter.cloud_provider }}" == "None":
|
||||
print("You should either use Whitenoise or select a " "Cloud Provider to serve static files")
|
||||
print("You should either use Whitenoise or select a Cloud Provider to serve static files")
|
||||
sys.exit(1)
|
||||
|
||||
if "{{ cookiecutter.mail_service }}" == "Amazon SES" and "{{ cookiecutter.cloud_provider }}" != "AWS":
|
||||
print("You should either use AWS or select a different " "Mail Service for sending emails.")
|
||||
print("You should either use AWS or select a different Mail Service for sending emails.")
|
||||
sys.exit(1)
|
||||
|
|
100
pyproject.toml
100
pyproject.toml
|
@ -1,35 +1,99 @@
|
|||
# ==== pytest ====
|
||||
[tool.pytest.ini_options]
|
||||
addopts = "-v --tb=short"
|
||||
norecursedirs = [
|
||||
".tox",
|
||||
".git",
|
||||
"*/migrations/*",
|
||||
"*/static/*",
|
||||
"docs",
|
||||
"venv",
|
||||
"*/{{cookiecutter.project_slug}}/*",
|
||||
[project]
|
||||
name = "cookiecutter-django"
|
||||
version = "2025.04.08"
|
||||
description = "A Cookiecutter template for creating production-ready Django projects quickly."
|
||||
readme = "README.md"
|
||||
keywords = [
|
||||
"cookiecutter",
|
||||
"django",
|
||||
"project template",
|
||||
"scaffolding",
|
||||
"skeleton",
|
||||
]
|
||||
license = { text = "BSD" }
|
||||
authors = [
|
||||
{ name = "Daniel Roy Greenfeld", email = "pydanny@gmail.com" },
|
||||
]
|
||||
requires-python = ">=3.12,<3.13"
|
||||
classifiers = [
|
||||
"Development Status :: 4 - Beta",
|
||||
"Environment :: Console",
|
||||
"Framework :: Django :: 5.0",
|
||||
"Intended Audience :: Developers",
|
||||
"License :: OSI Approved :: BSD License",
|
||||
"Natural Language :: English",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3 :: Only",
|
||||
"Programming Language :: Python :: 3.12",
|
||||
"Programming Language :: Python :: Implementation :: CPython",
|
||||
"Topic :: Software Development",
|
||||
]
|
||||
dependencies = [
|
||||
"binaryornot==0.4.4",
|
||||
"cookiecutter==2.6",
|
||||
"django-upgrade==1.22.2",
|
||||
"djlint==1.36.4",
|
||||
"gitpython==3.1.43",
|
||||
"jinja2==3.1.5",
|
||||
"pre-commit==4.1.0",
|
||||
"pygithub==2.5",
|
||||
"pytest==8.3.4",
|
||||
"pytest-cookies==0.7",
|
||||
"pytest-instafail==0.5",
|
||||
"pytest-xdist==3.6.1",
|
||||
"pyyaml==6.0.2",
|
||||
"requests==2.32.3",
|
||||
"ruff==0.11.4",
|
||||
"sh==2.1; sys_platform!='win23'",
|
||||
"tox==4.23.2",
|
||||
"tox-uv>=1.17",
|
||||
]
|
||||
urls = { Repository = "https://github.com/cookiecutter/cookiecutter-django" }
|
||||
|
||||
[dependency-groups]
|
||||
docs = [
|
||||
"myst-parser>=4",
|
||||
"sphinx>=8.0.2",
|
||||
"sphinx-autobuild>=2024.10.3",
|
||||
"sphinx-rtd-theme>=3",
|
||||
]
|
||||
|
||||
|
||||
# ==== black ====
|
||||
[tool.black]
|
||||
line-length = 119
|
||||
target-version = ['py311']
|
||||
|
||||
target-version = [
|
||||
'py312',
|
||||
]
|
||||
|
||||
# ==== isort ====
|
||||
|
||||
[tool.isort]
|
||||
profile = "black"
|
||||
line_length = 119
|
||||
known_first_party = [
|
||||
"tests",
|
||||
"scripts",
|
||||
"hooks",
|
||||
"tests",
|
||||
"scripts",
|
||||
"hooks",
|
||||
]
|
||||
|
||||
[tool.pyproject-fmt]
|
||||
keep_full_version = true
|
||||
|
||||
# ==== pytest ====
|
||||
|
||||
[tool.pytest.ini_options]
|
||||
addopts = "-v --tb=short"
|
||||
norecursedirs = [
|
||||
".tox",
|
||||
".git",
|
||||
"*/migrations/*",
|
||||
"*/static/*",
|
||||
"docs",
|
||||
"venv",
|
||||
"*/{{cookiecutter.project_slug}}/*",
|
||||
]
|
||||
|
||||
# ==== djLint ====
|
||||
|
||||
[tool.djlint]
|
||||
blank_line_after_tag = "load,extends"
|
||||
close_void_tags = true
|
||||
|
|
|
@ -1,28 +0,0 @@
|
|||
cookiecutter==2.5.0
|
||||
sh==2.0.6; sys_platform != "win32"
|
||||
binaryornot==0.4.4
|
||||
|
||||
# Code quality
|
||||
# ------------------------------------------------------------------------------
|
||||
black==23.12.1
|
||||
isort==5.13.2
|
||||
flake8==7.0.0
|
||||
django-upgrade==1.15.0
|
||||
djlint==1.34.1
|
||||
pre-commit==3.6.0
|
||||
|
||||
# Testing
|
||||
# ------------------------------------------------------------------------------
|
||||
tox==4.12.0
|
||||
pytest==7.4.4
|
||||
pytest-xdist==3.5.0
|
||||
pytest-cookies==0.7.0
|
||||
pytest-instafail==0.5.0
|
||||
pyyaml==6.0.1
|
||||
|
||||
# Scripting
|
||||
# ------------------------------------------------------------------------------
|
||||
PyGithub==2.1.1
|
||||
gitpython==3.1.41
|
||||
jinja2==3.1.3
|
||||
requests==2.31.0
|
|
@ -6,12 +6,12 @@ patches, only comparing major and minor version numbers.
|
|||
This script handles when there are multiple Django versions that need
|
||||
to keep up to date.
|
||||
"""
|
||||
|
||||
from __future__ import annotations
|
||||
|
||||
import os
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Iterable
|
||||
from pathlib import Path
|
||||
from typing import TYPE_CHECKING, Any, NamedTuple
|
||||
|
||||
|
@ -19,6 +19,8 @@ import requests
|
|||
from github import Github
|
||||
|
||||
if TYPE_CHECKING:
|
||||
from collections.abc import Iterable
|
||||
|
||||
from github.Issue import Issue
|
||||
|
||||
CURRENT_FILE = Path(__file__)
|
||||
|
@ -82,7 +84,7 @@ def get_name_and_version(requirements_line: str) -> tuple[str, ...]:
|
|||
|
||||
|
||||
def get_all_latest_django_versions(
|
||||
django_max_version: tuple[DjVersion] = None,
|
||||
django_max_version: tuple[DjVersion] | None = None,
|
||||
) -> tuple[DjVersion, list[DjVersion]]:
|
||||
"""
|
||||
Grabs all Django versions that are worthy of a GitHub issue.
|
||||
|
@ -220,8 +222,7 @@ class GitHubManager:
|
|||
if supported_dj_versions:
|
||||
if any(v >= needed_dj_version for v in supported_dj_versions):
|
||||
return package_info["info"]["version"], "✅"
|
||||
else:
|
||||
return "", "❌"
|
||||
return "", "❌"
|
||||
|
||||
# Django classifier DNE; assume it isn't a Django lib
|
||||
# Great exceptions include pylint-django, where we need to do this manually...
|
||||
|
|
69
scripts/node_version.py
Normal file
69
scripts/node_version.py
Normal file
|
@ -0,0 +1,69 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import json
|
||||
from pathlib import Path
|
||||
|
||||
ROOT = Path(__file__).parent.parent
|
||||
TEMPLATED_ROOT = ROOT / "{{cookiecutter.project_slug}}"
|
||||
DOCKERFILE = TEMPLATED_ROOT / "compose" / "local" / "node" / "Dockerfile"
|
||||
PROD_DOCKERFILE = TEMPLATED_ROOT / "compose" / "production" / "django" / "Dockerfile"
|
||||
PACKAGE_JSON = TEMPLATED_ROOT / "package.json"
|
||||
CI_YML = ROOT / ".github" / "workflows" / "ci.yml"
|
||||
|
||||
|
||||
def main() -> None:
|
||||
new_version = get_version_from_dockerfile()
|
||||
old_version = get_version_from_package_json()
|
||||
if old_version != new_version:
|
||||
update_package_json_version(old_version, new_version)
|
||||
update_ci_node_version(old_version, new_version)
|
||||
update_production_node_version(old_version, new_version)
|
||||
|
||||
|
||||
def get_version_from_dockerfile() -> str:
|
||||
# Extract version out of base image name:
|
||||
# FROM docker.io/node:22.13-bookworm-slim
|
||||
# -> 22.13
|
||||
with DOCKERFILE.open("r") as f:
|
||||
for line in f:
|
||||
if "FROM docker.io/node:" in line:
|
||||
_, _, docker_tag = line.partition(":")
|
||||
version_str, _, _ = docker_tag.partition("-")
|
||||
return version_str
|
||||
raise RuntimeError("Could not find version in Dockerfile")
|
||||
|
||||
|
||||
def get_version_from_package_json() -> str:
|
||||
package_json = json.loads(PACKAGE_JSON.read_text())
|
||||
return package_json["engines"]["node"]
|
||||
|
||||
|
||||
def update_package_json_version(old_version: str, new_version: str) -> None:
|
||||
package_json_text = PACKAGE_JSON.read_text()
|
||||
package_json_text = package_json_text.replace(
|
||||
f'"node": "{old_version}"',
|
||||
f'"node": "{new_version}"',
|
||||
)
|
||||
PACKAGE_JSON.write_text(package_json_text)
|
||||
|
||||
|
||||
def update_ci_node_version(old_version: str, new_version: str) -> None:
|
||||
yml_content = CI_YML.read_text()
|
||||
yml_content = yml_content.replace(
|
||||
f'node-version: "{old_version}"',
|
||||
f'node-version: "{new_version}"',
|
||||
)
|
||||
CI_YML.write_text(yml_content)
|
||||
|
||||
|
||||
def update_production_node_version(old_version: str, new_version: str) -> None:
|
||||
dockerfile_content = PROD_DOCKERFILE.read_text()
|
||||
dockerfile_content = dockerfile_content.replace(
|
||||
f"FROM docker.io/node:{old_version}",
|
||||
f"FROM docker.io/node:{new_version}",
|
||||
)
|
||||
PROD_DOCKERFILE.write_text(dockerfile_content)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
56
scripts/ruff_version.py
Normal file
56
scripts/ruff_version.py
Normal file
|
@ -0,0 +1,56 @@
|
|||
from __future__ import annotations
|
||||
|
||||
import subprocess
|
||||
import tomllib
|
||||
from pathlib import Path
|
||||
|
||||
ROOT = Path(__file__).parent.parent
|
||||
TEMPLATED_ROOT = ROOT / "{{cookiecutter.project_slug}}"
|
||||
REQUIREMENTS_LOCAL_TXT = TEMPLATED_ROOT / "requirements" / "local.txt"
|
||||
PRE_COMMIT_CONFIG = TEMPLATED_ROOT / ".pre-commit-config.yaml"
|
||||
PYPROJECT_TOML = ROOT / "pyproject.toml"
|
||||
|
||||
|
||||
def main() -> None:
|
||||
new_version = get_requirements_txt_version()
|
||||
old_version = get_pyproject_toml_version()
|
||||
if old_version == new_version:
|
||||
return
|
||||
|
||||
update_ruff_version(old_version, new_version)
|
||||
subprocess.run(["uv", "lock", "--no-upgrade"], cwd=ROOT)
|
||||
|
||||
|
||||
def get_requirements_txt_version() -> str:
|
||||
content = REQUIREMENTS_LOCAL_TXT.read_text()
|
||||
for line in content.split("\n"):
|
||||
if line.startswith("ruff"):
|
||||
return line.split(" ")[0].split("==")[1]
|
||||
raise RuntimeError("Could not find ruff version in requirements/local.txt")
|
||||
|
||||
|
||||
def get_pyproject_toml_version() -> str:
|
||||
data = tomllib.loads(PYPROJECT_TOML.read_text())
|
||||
for dependency in data["project"]["dependencies"]:
|
||||
if dependency.startswith("ruff=="):
|
||||
return dependency.split("==")[1]
|
||||
raise RuntimeError("Could not find ruff version in pyproject.toml")
|
||||
|
||||
|
||||
def update_ruff_version(old_version: str, new_version: str) -> None:
|
||||
# Update pyproject.toml
|
||||
new_content = PYPROJECT_TOML.read_text().replace(
|
||||
f"ruff=={old_version}",
|
||||
f"ruff=={new_version}",
|
||||
)
|
||||
PYPROJECT_TOML.write_text(new_content)
|
||||
# Update pre-commit config
|
||||
new_content = PRE_COMMIT_CONFIG.read_text().replace(
|
||||
f"repo: https://github.com/astral-sh/ruff-pre-commit\n rev: v{old_version}",
|
||||
f"repo: https://github.com/astral-sh/ruff-pre-commit\n rev: v{new_version}",
|
||||
)
|
||||
PRE_COMMIT_CONFIG.write_text(new_content)
|
||||
|
||||
|
||||
if __name__ == "__main__":
|
||||
main()
|
|
@ -1,6 +1,7 @@
|
|||
import datetime as dt
|
||||
import os
|
||||
import re
|
||||
import subprocess
|
||||
from collections.abc import Iterable
|
||||
from pathlib import Path
|
||||
|
||||
|
@ -47,12 +48,16 @@ def main() -> None:
|
|||
print(f"Wrote {changelog_path}")
|
||||
|
||||
# Update version
|
||||
setup_py_path = ROOT / "setup.py"
|
||||
setup_py_path = ROOT / "pyproject.toml"
|
||||
update_version(setup_py_path, release)
|
||||
print(f"Updated version in {setup_py_path}")
|
||||
|
||||
# Run uv lock
|
||||
uv_lock_path = ROOT / "uv.lock"
|
||||
subprocess.run(["uv", "lock", "--no-upgrade"], cwd=ROOT)
|
||||
|
||||
# Commit changes, create tag and push
|
||||
update_git_repo([changelog_path, setup_py_path], release)
|
||||
update_git_repo([changelog_path, setup_py_path, uv_lock_path], release)
|
||||
|
||||
# Create GitHub release
|
||||
github_release = repo.create_git_release(
|
||||
|
@ -124,7 +129,7 @@ def write_changelog(file_path: Path, release: str, content: str) -> None:
|
|||
|
||||
|
||||
def update_version(file_path: Path, release: str) -> None:
|
||||
"""Update template version in setup.py."""
|
||||
"""Update template version in pyproject.toml."""
|
||||
old_content = file_path.read_text()
|
||||
updated_content = re.sub(
|
||||
r'\nversion = "\d+\.\d+\.\d+"\n',
|
||||
|
|
|
@ -40,8 +40,8 @@ def iter_recent_authors():
|
|||
"""
|
||||
Fetch users who opened recently merged pull requests.
|
||||
|
||||
Use Github API to fetch recent authors rather than
|
||||
git CLI to work with Github usernames.
|
||||
Use GitHub API to fetch recent authors rather than
|
||||
git CLI to work with GitHub usernames.
|
||||
"""
|
||||
repo = Github(login_or_token=GITHUB_TOKEN, per_page=5).get_repo(GITHUB_REPO)
|
||||
recent_pulls = repo.get_pulls(state="closed", sort="updated", direction="desc").get_page(0)
|
||||
|
|
41
setup.py
41
setup.py
|
@ -1,41 +0,0 @@
|
|||
#!/usr/bin/env python
|
||||
try:
|
||||
from setuptools import setup
|
||||
except ImportError:
|
||||
from distutils.core import setup
|
||||
|
||||
# We use calendar versioning
|
||||
version = "2024.01.16"
|
||||
|
||||
with open("README.md") as readme_file:
|
||||
long_description = readme_file.read()
|
||||
|
||||
setup(
|
||||
name="cookiecutter-django",
|
||||
version=version,
|
||||
description=("A Cookiecutter template for creating production-ready " "Django projects quickly."),
|
||||
long_description=long_description,
|
||||
author="Daniel Roy Greenfeld",
|
||||
author_email="pydanny@gmail.com",
|
||||
url="https://github.com/cookiecutter/cookiecutter-django",
|
||||
packages=[],
|
||||
license="BSD",
|
||||
zip_safe=False,
|
||||
classifiers=[
|
||||
"Development Status :: 4 - Beta",
|
||||
"Environment :: Console",
|
||||
"Framework :: Django :: 4.2",
|
||||
"Intended Audience :: Developers",
|
||||
"Natural Language :: English",
|
||||
"License :: OSI Approved :: BSD License",
|
||||
"Programming Language :: Python",
|
||||
"Programming Language :: Python :: 3",
|
||||
"Programming Language :: Python :: 3.11",
|
||||
"Programming Language :: Python :: Implementation :: CPython",
|
||||
"Topic :: Software Development",
|
||||
],
|
||||
keywords=(
|
||||
"cookiecutter, Python, projects, project templates, django, "
|
||||
"skeleton, scaffolding, project directory, setup.py"
|
||||
),
|
||||
)
|
|
@ -11,7 +11,7 @@ mkdir -p .cache/bare
|
|||
cd .cache/bare
|
||||
|
||||
# create the project using the default settings in cookiecutter.json
|
||||
cookiecutter ../../ --no-input --overwrite-if-exists use_docker=n "$@"
|
||||
uv run cookiecutter ../../ --no-input --overwrite-if-exists use_docker=n "$@"
|
||||
cd my_awesome_project
|
||||
|
||||
# Install OS deps
|
||||
|
|
|
@ -2,6 +2,8 @@ import glob
|
|||
import os
|
||||
import re
|
||||
import sys
|
||||
from collections.abc import Iterable
|
||||
from pathlib import Path
|
||||
|
||||
import pytest
|
||||
|
||||
|
@ -57,12 +59,10 @@ SUPPORTED_COMBINATIONS = [
|
|||
{"editor": "VS Code"},
|
||||
{"use_docker": "y"},
|
||||
{"use_docker": "n"},
|
||||
{"postgresql_version": "16"},
|
||||
{"postgresql_version": "15"},
|
||||
{"postgresql_version": "14"},
|
||||
{"postgresql_version": "13"},
|
||||
{"postgresql_version": "12"},
|
||||
{"postgresql_version": "11"},
|
||||
{"postgresql_version": "10"},
|
||||
{"cloud_provider": "AWS", "use_whitenoise": "y"},
|
||||
{"cloud_provider": "AWS", "use_whitenoise": "n"},
|
||||
{"cloud_provider": "GCP", "use_whitenoise": "y"},
|
||||
|
@ -74,7 +74,7 @@ SUPPORTED_COMBINATIONS = [
|
|||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mandrill"},
|
||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Postmark"},
|
||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Sendgrid"},
|
||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "SendinBlue"},
|
||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Brevo"},
|
||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "SparkPost"},
|
||||
{"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Other SMTP"},
|
||||
# Note: cloud_provider=None AND use_whitenoise=n is not supported
|
||||
|
@ -84,7 +84,7 @@ SUPPORTED_COMBINATIONS = [
|
|||
{"cloud_provider": "AWS", "mail_service": "Mandrill"},
|
||||
{"cloud_provider": "AWS", "mail_service": "Postmark"},
|
||||
{"cloud_provider": "AWS", "mail_service": "Sendgrid"},
|
||||
{"cloud_provider": "AWS", "mail_service": "SendinBlue"},
|
||||
{"cloud_provider": "AWS", "mail_service": "Brevo"},
|
||||
{"cloud_provider": "AWS", "mail_service": "SparkPost"},
|
||||
{"cloud_provider": "AWS", "mail_service": "Other SMTP"},
|
||||
{"cloud_provider": "GCP", "mail_service": "Mailgun"},
|
||||
|
@ -92,7 +92,7 @@ SUPPORTED_COMBINATIONS = [
|
|||
{"cloud_provider": "GCP", "mail_service": "Mandrill"},
|
||||
{"cloud_provider": "GCP", "mail_service": "Postmark"},
|
||||
{"cloud_provider": "GCP", "mail_service": "Sendgrid"},
|
||||
{"cloud_provider": "GCP", "mail_service": "SendinBlue"},
|
||||
{"cloud_provider": "GCP", "mail_service": "Brevo"},
|
||||
{"cloud_provider": "GCP", "mail_service": "SparkPost"},
|
||||
{"cloud_provider": "GCP", "mail_service": "Other SMTP"},
|
||||
{"cloud_provider": "Azure", "mail_service": "Mailgun"},
|
||||
|
@ -100,7 +100,7 @@ SUPPORTED_COMBINATIONS = [
|
|||
{"cloud_provider": "Azure", "mail_service": "Mandrill"},
|
||||
{"cloud_provider": "Azure", "mail_service": "Postmark"},
|
||||
{"cloud_provider": "Azure", "mail_service": "Sendgrid"},
|
||||
{"cloud_provider": "Azure", "mail_service": "SendinBlue"},
|
||||
{"cloud_provider": "Azure", "mail_service": "Brevo"},
|
||||
{"cloud_provider": "Azure", "mail_service": "SparkPost"},
|
||||
{"cloud_provider": "Azure", "mail_service": "Other SMTP"},
|
||||
# Note: cloud_providers GCP, Azure, and None
|
||||
|
@ -147,19 +147,19 @@ def _fixture_id(ctx):
|
|||
return "-".join(f"{key}:{value}" for key, value in ctx.items())
|
||||
|
||||
|
||||
def build_files_list(base_dir):
|
||||
def build_files_list(base_path: Path):
|
||||
"""Build a list containing absolute paths to the generated files."""
|
||||
return [os.path.join(dirpath, file_path) for dirpath, subdirs, files in os.walk(base_dir) for file_path in files]
|
||||
return [dirpath / file_path for dirpath, subdirs, files in base_path.walk() for file_path in files]
|
||||
|
||||
|
||||
def check_paths(paths):
|
||||
def check_paths(paths: Iterable[Path]):
|
||||
"""Method to check all paths have correct substitutions."""
|
||||
# Assert that no match is found in any of the files
|
||||
for path in paths:
|
||||
if is_binary(path):
|
||||
if is_binary(str(path)):
|
||||
continue
|
||||
|
||||
for line in open(path):
|
||||
for line in path.open():
|
||||
match = RE_OBJ.search(line)
|
||||
assert match is None, f"cookiecutter variable not replaced in {path}"
|
||||
|
||||
|
@ -174,34 +174,31 @@ def test_project_generation(cookies, context, context_override):
|
|||
assert result.project_path.name == context["project_slug"]
|
||||
assert result.project_path.is_dir()
|
||||
|
||||
paths = build_files_list(str(result.project_path))
|
||||
paths = build_files_list(result.project_path)
|
||||
assert paths
|
||||
check_paths(paths)
|
||||
|
||||
|
||||
@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
|
||||
def test_flake8_passes(cookies, context_override):
|
||||
"""Generated project should pass flake8."""
|
||||
def test_ruff_check_passes(cookies, context_override):
|
||||
"""Generated project should pass ruff check."""
|
||||
result = cookies.bake(extra_context=context_override)
|
||||
|
||||
try:
|
||||
sh.flake8(_cwd=str(result.project_path))
|
||||
sh.ruff("check", ".", _cwd=str(result.project_path))
|
||||
except sh.ErrorReturnCode as e:
|
||||
pytest.fail(e.stdout.decode())
|
||||
|
||||
|
||||
@auto_fixable
|
||||
@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
|
||||
def test_black_passes(cookies, context_override):
|
||||
"""Check whether generated project passes black style."""
|
||||
def test_ruff_format_passes(cookies, context_override):
|
||||
"""Check whether generated project passes ruff format."""
|
||||
result = cookies.bake(extra_context=context_override)
|
||||
|
||||
try:
|
||||
sh.black(
|
||||
"--check",
|
||||
"--diff",
|
||||
"--exclude",
|
||||
"migrations",
|
||||
sh.ruff(
|
||||
"format",
|
||||
".",
|
||||
_cwd=str(result.project_path),
|
||||
)
|
||||
|
@ -234,7 +231,7 @@ def test_django_upgrade_passes(cookies, context_override):
|
|||
try:
|
||||
sh.django_upgrade(
|
||||
"--target-version",
|
||||
"4.2",
|
||||
"5.0",
|
||||
*python_files,
|
||||
_cwd=str(result.project_path),
|
||||
)
|
||||
|
@ -251,7 +248,13 @@ def test_djlint_lint_passes(cookies, context_override):
|
|||
# TODO: remove T002 when fixed https://github.com/Riverside-Healthcare/djLint/issues/687
|
||||
ignored_rules = "H006,H030,H031,T002"
|
||||
try:
|
||||
sh.djlint("--lint", "--ignore", f"{autofixable_rules},{ignored_rules}", ".", _cwd=str(result.project_path))
|
||||
sh.djlint(
|
||||
"--lint",
|
||||
"--ignore",
|
||||
f"{autofixable_rules},{ignored_rules}",
|
||||
".",
|
||||
_cwd=str(result.project_path),
|
||||
)
|
||||
except sh.ErrorReturnCode as e:
|
||||
pytest.fail(e.stdout.decode())
|
||||
|
||||
|
@ -269,10 +272,10 @@ def test_djlint_check_passes(cookies, context_override):
|
|||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
["use_docker", "expected_test_script"],
|
||||
("use_docker", "expected_test_script"),
|
||||
[
|
||||
("n", "pytest"),
|
||||
("y", "docker compose -f local.yml run django pytest"),
|
||||
("y", "docker compose -f docker-compose.local.yml run django pytest"),
|
||||
],
|
||||
)
|
||||
def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_script):
|
||||
|
@ -284,20 +287,20 @@ def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_scrip
|
|||
assert result.project_path.name == context["project_slug"]
|
||||
assert result.project_path.is_dir()
|
||||
|
||||
with open(f"{result.project_path}/.travis.yml") as travis_yml:
|
||||
with (result.project_path / ".travis.yml").open() as travis_yml:
|
||||
try:
|
||||
yml = yaml.safe_load(travis_yml)["jobs"]["include"]
|
||||
assert yml[0]["script"] == ["flake8"]
|
||||
assert yml[0]["script"] == ["ruff check ."]
|
||||
assert yml[1]["script"] == [expected_test_script]
|
||||
except yaml.YAMLError as e:
|
||||
pytest.fail(str(e))
|
||||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
["use_docker", "expected_test_script"],
|
||||
("use_docker", "expected_test_script"),
|
||||
[
|
||||
("n", "pytest"),
|
||||
("y", "docker compose -f local.yml run django pytest"),
|
||||
("y", "docker compose -f docker-compose.local.yml run django pytest"),
|
||||
],
|
||||
)
|
||||
def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expected_test_script):
|
||||
|
@ -309,11 +312,11 @@ def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expec
|
|||
assert result.project_path.name == context["project_slug"]
|
||||
assert result.project_path.is_dir()
|
||||
|
||||
with open(f"{result.project_path}/.gitlab-ci.yml") as gitlab_yml:
|
||||
with (result.project_path / ".gitlab-ci.yml").open() as gitlab_yml:
|
||||
try:
|
||||
gitlab_config = yaml.safe_load(gitlab_yml)
|
||||
assert gitlab_config["precommit"]["script"] == [
|
||||
"pre-commit run --show-diff-on-failure --color=always --all-files"
|
||||
"pre-commit run --show-diff-on-failure --color=always --all-files",
|
||||
]
|
||||
assert gitlab_config["pytest"]["script"] == [expected_test_script]
|
||||
except yaml.YAMLError as e:
|
||||
|
@ -321,10 +324,10 @@ def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expec
|
|||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
["use_docker", "expected_test_script"],
|
||||
("use_docker", "expected_test_script"),
|
||||
[
|
||||
("n", "pytest"),
|
||||
("y", "docker compose -f local.yml run django pytest"),
|
||||
("y", "docker compose -f docker-compose.local.yml run django pytest"),
|
||||
],
|
||||
)
|
||||
def test_github_invokes_linter_and_pytest(cookies, context, use_docker, expected_test_script):
|
||||
|
@ -336,7 +339,7 @@ def test_github_invokes_linter_and_pytest(cookies, context, use_docker, expected
|
|||
assert result.project_path.name == context["project_slug"]
|
||||
assert result.project_path.is_dir()
|
||||
|
||||
with open(f"{result.project_path}/.github/workflows/ci.yml") as github_yml:
|
||||
with (result.project_path / ".github" / "workflows" / "ci.yml").open() as github_yml:
|
||||
try:
|
||||
github_config = yaml.safe_load(github_yml)
|
||||
linter_present = False
|
||||
|
@ -376,7 +379,7 @@ def test_error_if_incompatible(cookies, context, invalid_context):
|
|||
|
||||
|
||||
@pytest.mark.parametrize(
|
||||
["editor", "pycharm_docs_exist"],
|
||||
("editor", "pycharm_docs_exist"),
|
||||
[
|
||||
("None", False),
|
||||
("PyCharm", True),
|
||||
|
@ -387,9 +390,9 @@ def test_pycharm_docs_removed(cookies, context, editor, pycharm_docs_exist):
|
|||
context.update({"editor": editor})
|
||||
result = cookies.bake(extra_context=context)
|
||||
|
||||
with open(f"{result.project_path}/docs/index.rst") as f:
|
||||
has_pycharm_docs = "pycharm/configuration" in f.read()
|
||||
assert has_pycharm_docs is pycharm_docs_exist
|
||||
index_rst = result.project_path / "docs" / "index.rst"
|
||||
has_pycharm_docs = "pycharm/configuration" in index_rst.read_text()
|
||||
assert has_pycharm_docs is pycharm_docs_exist
|
||||
|
||||
|
||||
def test_trim_domain_email(cookies, context):
|
||||
|
@ -399,7 +402,7 @@ def test_trim_domain_email(cookies, context):
|
|||
"use_docker": "y",
|
||||
"domain_name": " example.com ",
|
||||
"email": " me@example.com ",
|
||||
}
|
||||
},
|
||||
)
|
||||
result = cookies.bake(extra_context=context)
|
||||
|
||||
|
|
|
@ -11,32 +11,41 @@ mkdir -p .cache/docker
|
|||
cd .cache/docker
|
||||
|
||||
# create the project using the default settings in cookiecutter.json
|
||||
cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y "$@"
|
||||
uv run cookiecutter ../../ --no-input --overwrite-if-exists use_docker=y "$@"
|
||||
cd my_awesome_project
|
||||
|
||||
# make sure all images build
|
||||
docker compose -f local.yml build
|
||||
docker compose -f docker-compose.local.yml build
|
||||
|
||||
# run the project's type checks
|
||||
docker compose -f local.yml run django mypy my_awesome_project
|
||||
docker compose -f docker-compose.local.yml run --rm django mypy my_awesome_project
|
||||
|
||||
# run the project's tests
|
||||
docker compose -f local.yml run django pytest
|
||||
docker compose -f docker-compose.local.yml run --rm django pytest
|
||||
|
||||
# return non-zero status code if there are migrations that have not been created
|
||||
docker compose -f local.yml run django python manage.py makemigrations --dry-run --check || { echo "ERROR: there were changes in the models, but migration listed above have not been created and are not saved in version control"; exit 1; }
|
||||
docker compose -f docker-compose.local.yml run --rm django python manage.py makemigrations --check || { echo "ERROR: there were changes in the models, but migration listed above have not been created and are not saved in version control"; exit 1; }
|
||||
|
||||
# Test support for translations
|
||||
docker compose -f local.yml run django python manage.py makemessages --all
|
||||
docker compose -f docker-compose.local.yml run --rm django python manage.py makemessages --all
|
||||
|
||||
# Make sure the check doesn't raise any warnings
|
||||
docker compose -f local.yml run django python manage.py check --fail-level WARNING
|
||||
docker compose -f docker-compose.local.yml run --rm \
|
||||
-e DJANGO_SECRET_KEY="$(openssl rand -base64 64)" \
|
||||
-e REDIS_URL=redis://redis:6379/0 \
|
||||
-e DJANGO_AWS_ACCESS_KEY_ID=x \
|
||||
-e DJANGO_AWS_SECRET_ACCESS_KEY=x \
|
||||
-e DJANGO_AWS_STORAGE_BUCKET_NAME=x \
|
||||
-e DJANGO_ADMIN_URL=x \
|
||||
-e MAILGUN_API_KEY=x \
|
||||
-e MAILGUN_DOMAIN=x \
|
||||
django python manage.py check --settings=config.settings.production --deploy --database default --fail-level WARNING
|
||||
|
||||
# Generate the HTML for the documentation
|
||||
docker compose -f local.yml run docs make html
|
||||
docker compose -f docker-compose.docs.yml run --rm docs make html
|
||||
|
||||
# Run npm build script if package.json is present
|
||||
if [ -f "package.json" ]
|
||||
then
|
||||
docker compose -f local.yml run node npm run build
|
||||
docker compose -f docker-compose.local.yml run --rm node npm run build
|
||||
fi
|
||||
|
|
|
@ -1,4 +1,5 @@
|
|||
"""Unit tests for the hooks"""
|
||||
|
||||
import os
|
||||
from pathlib import Path
|
||||
|
||||
|
@ -7,7 +8,7 @@ import pytest
|
|||
from hooks.post_gen_project import append_to_gitignore_file
|
||||
|
||||
|
||||
@pytest.fixture()
|
||||
@pytest.fixture
|
||||
def working_directory(tmp_path):
|
||||
prev_cwd = Path.cwd()
|
||||
os.chdir(tmp_path)
|
||||
|
|
5
tox.ini
5
tox.ini
|
@ -1,12 +1,11 @@
|
|||
[tox]
|
||||
skipsdist = true
|
||||
envlist = py311,black-template
|
||||
envlist = py312,black-template
|
||||
|
||||
[testenv]
|
||||
deps = -rrequirements.txt
|
||||
passenv = AUTOFIXABLE_STYLES
|
||||
commands = pytest -n auto {posargs:./tests}
|
||||
|
||||
[testenv:black-template]
|
||||
deps = black
|
||||
commands = black --check hooks tests setup.py docs scripts
|
||||
commands = black --check hooks tests docs scripts
|
||||
|
|
|
@ -2,7 +2,7 @@
|
|||
{
|
||||
"name": "{{cookiecutter.project_slug}}_dev",
|
||||
"dockerComposeFile": [
|
||||
"../local.yml"
|
||||
"../docker-compose.local.yml"
|
||||
],
|
||||
"init": true,
|
||||
"mounts": [
|
||||
|
@ -35,24 +35,13 @@
|
|||
"analysis.typeCheckingMode": "basic",
|
||||
"defaultInterpreterPath": "/usr/local/bin/python",
|
||||
"editor.codeActionsOnSave": {
|
||||
"source.organizeImports": true
|
||||
"source.organizeImports": "always"
|
||||
},
|
||||
// Uncomment when fixed
|
||||
// https://github.com/microsoft/vscode-remote-release/issues/8474
|
||||
// "editor.defaultFormatter": "ms-python.black-formatter",
|
||||
"formatting.blackPath": "/usr/local/bin/black",
|
||||
"formatting.provider": "black",
|
||||
"editor.defaultFormatter": "charliermarsh.ruff",
|
||||
"languageServer": "Pylance",
|
||||
// "linting.banditPath": "/usr/local/py-utils/bin/bandit",
|
||||
"linting.enabled": true,
|
||||
"linting.flake8Enabled": true,
|
||||
"linting.flake8Path": "/usr/local/bin/flake8",
|
||||
"linting.mypyEnabled": true,
|
||||
"linting.mypyPath": "/usr/local/bin/mypy",
|
||||
"linting.pycodestylePath": "/usr/local/bin/pycodestyle",
|
||||
// "linting.pydocstylePath": "/usr/local/py-utils/bin/pydocstyle",
|
||||
"linting.pylintEnabled": true,
|
||||
"linting.pylintPath": "/usr/local/bin/pylint"
|
||||
}
|
||||
},
|
||||
// https://code.visualstudio.com/docs/remote/devcontainerjson-reference#_vs-code-specific-properties
|
||||
|
@ -65,8 +54,7 @@
|
|||
// python
|
||||
"ms-python.python",
|
||||
"ms-python.vscode-pylance",
|
||||
"ms-python.isort",
|
||||
"ms-python.black-formatter",
|
||||
"charliermarsh.ruff",
|
||||
// django
|
||||
"batisteo.vscode-django"
|
||||
]
|
||||
|
|
|
@ -7,13 +7,13 @@ environment:
|
|||
POSTGRES_DB: 'test_{{ cookiecutter.project_slug }}'
|
||||
POSTGRES_HOST_AUTH_METHOD: trust
|
||||
{%- if cookiecutter.use_celery == 'y' %}
|
||||
CELERY_BROKER_URL: 'redis://redis:6379/0'
|
||||
REDIS_URL: 'redis://redis:6379/0'
|
||||
{%- endif %}
|
||||
|
||||
steps:
|
||||
- name: lint
|
||||
pull: if-not-exists
|
||||
image: python:3.11
|
||||
image: python:3.12
|
||||
environment:
|
||||
PRE_COMMIT_HOME: ${CI_PROJECT_DIR}/.cache/pre-commit
|
||||
volumes:
|
||||
|
@ -27,16 +27,17 @@ steps:
|
|||
- name: test
|
||||
pull: if-not-exists
|
||||
{%- if cookiecutter.use_docker == 'y' %}
|
||||
image: docker/compose:1.29.2
|
||||
image: docker:25.0
|
||||
environment:
|
||||
DATABASE_URL: pgsql://$POSTGRES_USER:$POSTGRES_PASSWORD@postgres/$POSTGRES_DB
|
||||
commands:
|
||||
- docker-compose -f local.yml build
|
||||
- docker-compose -f local.yml run --rm django python manage.py migrate
|
||||
- docker-compose -f local.yml up -d
|
||||
- docker-compose -f local.yml run django pytest
|
||||
- docker-compose -f docker-compose.local.yml build
|
||||
- docker-compose -f docker-compose.docs.yml build
|
||||
- docker-compose -f docker-compose.local.yml run --rm django python manage.py migrate
|
||||
- docker-compose -f docker-compose.local.yml up -d
|
||||
- docker-compose -f docker-compose.local.yml run django pytest
|
||||
{%- else %}
|
||||
image: python:3.11
|
||||
image: python:3.12
|
||||
commands:
|
||||
- pip install -r requirements/local.txt
|
||||
- pytest
|
||||
|
|
|
@ -12,7 +12,7 @@ trim_trailing_whitespace = true
|
|||
indent_style = space
|
||||
indent_size = 4
|
||||
|
||||
[*.{html,css,scss,json,yml,xml}]
|
||||
[*.{html,css,scss,json,yml,xml,toml}]
|
||||
indent_style = space
|
||||
indent_size = 2
|
||||
|
||||
|
|
|
@ -28,8 +28,8 @@ POSTMARK_SERVER_TOKEN=
|
|||
SENDGRID_API_KEY=
|
||||
SENDGRID_GENERATE_MESSAGE_ID=True
|
||||
SENDGRID_MERGE_FIELD_FORMAT=None
|
||||
{% elif cookiecutter.mail_service == 'SendinBlue' %}
|
||||
SENDINBLUE_API_KEY=
|
||||
{% elif cookiecutter.mail_service == 'Brevo' %}
|
||||
BREVO_API_KEY=
|
||||
{% elif cookiecutter.mail_service == 'SparkPost' %}
|
||||
SPARKPOST_API_KEY=
|
||||
{% endif %}
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
# Config for Dependabot updates. See Documentation here:
|
||||
# https://help.github.com/github/administering-a-repository/configuration-options-for-dependency-updates
|
||||
# https://docs.github.com/en/code-security/dependabot/dependabot-version-updates/configuration-options-for-the-dependabot.yml-file
|
||||
|
||||
version: 2
|
||||
updates:
|
||||
|
@ -9,16 +9,20 @@ updates:
|
|||
# Every weekday
|
||||
schedule:
|
||||
interval: 'daily'
|
||||
groups:
|
||||
github-actions:
|
||||
patterns:
|
||||
- '*'
|
||||
|
||||
{%- if cookiecutter.use_docker == 'y' %}
|
||||
|
||||
# Enable version updates for Docker
|
||||
# We need to specify each Dockerfile in a separate entry because Dependabot doesn't
|
||||
# support wildcards or recursively checking subdirectories. Check this issue for updates:
|
||||
# https://github.com/dependabot/dependabot-core/issues/2178
|
||||
- package-ecosystem: 'docker'
|
||||
# Look for a `Dockerfile` in the `compose/local/django` directory
|
||||
directory: 'compose/local/django/'
|
||||
directories:
|
||||
- 'compose/local/django/'
|
||||
- 'compose/local/docs/'
|
||||
- 'compose/production/django/'
|
||||
# Every weekday
|
||||
schedule:
|
||||
interval: 'daily'
|
||||
|
@ -28,57 +32,22 @@ updates:
|
|||
update-types:
|
||||
- 'version-update:semver-major'
|
||||
- 'version-update:semver-minor'
|
||||
groups:
|
||||
docker-python:
|
||||
patterns:
|
||||
- '*'
|
||||
|
||||
|
||||
- package-ecosystem: 'docker'
|
||||
# Look for a `Dockerfile` in the `compose/local/docs` directory
|
||||
directory: 'compose/local/docs/'
|
||||
# Every weekday
|
||||
schedule:
|
||||
interval: 'daily'
|
||||
# Ignore minor version updates (3.10 -> 3.11) but update patch versions
|
||||
ignore:
|
||||
- dependency-name: '*'
|
||||
update-types:
|
||||
- 'version-update:semver-major'
|
||||
- 'version-update:semver-minor'
|
||||
|
||||
- package-ecosystem: 'docker'
|
||||
# Look for a `Dockerfile` in the `compose/local/node` directory
|
||||
directory: 'compose/local/node/'
|
||||
# Every weekday
|
||||
schedule:
|
||||
interval: 'daily'
|
||||
|
||||
- package-ecosystem: 'docker'
|
||||
# Look for a `Dockerfile` in the `compose/production/aws` directory
|
||||
directory: 'compose/production/aws/'
|
||||
# Every weekday
|
||||
schedule:
|
||||
interval: 'daily'
|
||||
|
||||
- package-ecosystem: 'docker'
|
||||
# Look for a `Dockerfile` in the `compose/production/django` directory
|
||||
directory: 'compose/production/django/'
|
||||
# Every weekday
|
||||
schedule:
|
||||
interval: 'daily'
|
||||
# Ignore minor version updates (3.10 -> 3.11) but update patch versions
|
||||
ignore:
|
||||
- dependency-name: '*'
|
||||
update-types:
|
||||
- 'version-update:semver-major'
|
||||
- 'version-update:semver-minor'
|
||||
|
||||
- package-ecosystem: 'docker'
|
||||
# Look for a `Dockerfile` in the `compose/production/postgres` directory
|
||||
directory: 'compose/production/postgres/'
|
||||
# Every weekday
|
||||
schedule:
|
||||
interval: 'daily'
|
||||
|
||||
- package-ecosystem: 'docker'
|
||||
# Look for a `Dockerfile` in the `compose/production/traefik` directory
|
||||
directory: 'compose/production/traefik/'
|
||||
# Look for a `Dockerfile` in the listed directories
|
||||
directories:
|
||||
- 'compose/local/node/'
|
||||
- 'compose/production/aws/'
|
||||
- 'compose/production/postgres/'
|
||||
- 'compose/production/traefik/'
|
||||
{%- if cookiecutter.cloud_provider == 'None' %}
|
||||
- 'compose/production/nginx/'
|
||||
{%- endif %}
|
||||
# Every weekday
|
||||
schedule:
|
||||
interval: 'daily'
|
||||
|
@ -88,11 +57,16 @@ updates:
|
|||
# Enable version updates for Python/Pip - Production
|
||||
- package-ecosystem: 'pip'
|
||||
# Look for a `requirements.txt` in the `root` directory
|
||||
# also 'setup.cfg', 'runtime.txt' and 'requirements/*.txt'
|
||||
# also 'setup.cfg', '.python-version' and 'requirements/*.txt'
|
||||
directory: '/'
|
||||
# Every weekday
|
||||
schedule:
|
||||
interval: 'daily'
|
||||
groups:
|
||||
python:
|
||||
update-types:
|
||||
- 'minor'
|
||||
- 'patch'
|
||||
|
||||
{%- if cookiecutter.frontend_pipeline == 'Gulp' %}
|
||||
|
||||
|
@ -103,5 +77,10 @@ updates:
|
|||
# Every weekday
|
||||
schedule:
|
||||
interval: 'daily'
|
||||
groups:
|
||||
javascript:
|
||||
update-types:
|
||||
- 'minor'
|
||||
- 'patch'
|
||||
|
||||
{%- endif %}
|
||||
|
|
|
@ -26,17 +26,17 @@ jobs:
|
|||
uses: actions/checkout@v4
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
python-version-file: '.python-version'
|
||||
|
||||
{%- if cookiecutter.open_source_license != 'Not open source' %}
|
||||
# Consider using pre-commit.ci for open source project
|
||||
{%- endif %}
|
||||
- name: Run pre-commit
|
||||
uses: pre-commit/action@v3.0.0
|
||||
uses: pre-commit/action@v3.0.1
|
||||
|
||||
# With no caching at all the entire ci process takes 4m 30s to complete!
|
||||
# With no caching at all the entire ci process takes 3m to complete!
|
||||
pytest:
|
||||
runs-on: ubuntu-latest
|
||||
{%- if cookiecutter.use_docker == 'n' %}
|
||||
|
@ -57,7 +57,7 @@ jobs:
|
|||
|
||||
env:
|
||||
{%- if cookiecutter.use_celery == 'y' %}
|
||||
CELERY_BROKER_URL: 'redis://localhost:6379/0'
|
||||
REDIS_URL: 'redis://localhost:6379/0'
|
||||
{%- endif %}
|
||||
# postgres://user:password@host:port/database
|
||||
DATABASE_URL: 'postgres://postgres:postgres@localhost:5432/postgres'
|
||||
|
@ -68,23 +68,49 @@ jobs:
|
|||
uses: actions/checkout@v4
|
||||
{%- if cookiecutter.use_docker == 'y' %}
|
||||
|
||||
- name: Build the Stack
|
||||
run: docker compose -f local.yml build
|
||||
- name: Set up Docker Buildx
|
||||
uses: docker/setup-buildx-action@v3
|
||||
|
||||
- name: Build and cache local backend
|
||||
uses: docker/bake-action@v6
|
||||
with:
|
||||
push: false
|
||||
load: true
|
||||
files: docker-compose.local.yml
|
||||
targets: django
|
||||
set: |
|
||||
django.cache-from=type=gha,scope=django-cached-tests
|
||||
django.cache-to=type=gha,scope=django-cached-tests,mode=max
|
||||
postgres.cache-from=type=gha,scope=postgres-cached-tests
|
||||
postgres.cache-to=type=gha,scope=postgres-cached-tests,mode=max
|
||||
|
||||
- name: Build and cache docs
|
||||
uses: docker/bake-action@v6
|
||||
with:
|
||||
push: false
|
||||
load: true
|
||||
files: docker-compose.docs.yml
|
||||
set: |
|
||||
docs.cache-from=type=gha,scope=cached-docs
|
||||
docs.cache-to=type=gha,scope=cached-docs,mode=max
|
||||
|
||||
- name: Check DB Migrations
|
||||
run: docker compose -f docker-compose.local.yml run --rm django python manage.py makemigrations --check
|
||||
|
||||
- name: Run DB Migrations
|
||||
run: docker compose -f local.yml run --rm django python manage.py migrate
|
||||
run: docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
|
||||
|
||||
- name: Run Django Tests
|
||||
run: docker compose -f local.yml run django pytest
|
||||
run: docker compose -f docker-compose.local.yml run django pytest
|
||||
|
||||
- name: Tear down the Stack
|
||||
run: docker compose -f local.yml down
|
||||
run: docker compose -f docker-compose.local.yml down
|
||||
{%- else %}
|
||||
|
||||
- name: Set up Python
|
||||
uses: actions/setup-python@v4
|
||||
uses: actions/setup-python@v5
|
||||
with:
|
||||
python-version: '3.11'
|
||||
python-version-file: '.python-version'
|
||||
cache: pip
|
||||
cache-dependency-path: |
|
||||
requirements/base.txt
|
||||
|
@ -95,6 +121,12 @@ jobs:
|
|||
python -m pip install --upgrade pip
|
||||
pip install -r requirements/local.txt
|
||||
|
||||
- name: Check DB Migrations
|
||||
run: python manage.py makemigrations --check
|
||||
|
||||
- name: Run DB Migrations
|
||||
run: python manage.py migrate
|
||||
|
||||
- name: Test with pytest
|
||||
run: pytest
|
||||
{%- endif %}
|
||||
|
|
3
{{cookiecutter.project_slug}}/.gitignore
vendored
3
{{cookiecutter.project_slug}}/.gitignore
vendored
|
@ -59,9 +59,6 @@ docs/_build/
|
|||
# PyBuilder
|
||||
target/
|
||||
|
||||
# pyenv
|
||||
.python-version
|
||||
|
||||
{% if cookiecutter.use_celery == 'y' -%}
|
||||
# celery beat schedule file
|
||||
celerybeat-schedule
|
||||
|
|
|
@ -8,12 +8,12 @@ variables:
|
|||
POSTGRES_DB: 'test_{{ cookiecutter.project_slug }}'
|
||||
POSTGRES_HOST_AUTH_METHOD: trust
|
||||
{%- if cookiecutter.use_celery == 'y' %}
|
||||
CELERY_BROKER_URL: 'redis://redis:6379/0'
|
||||
REDIS_URL: 'redis://redis:6379/0'
|
||||
{%- endif %}
|
||||
|
||||
precommit:
|
||||
stage: lint
|
||||
image: python:3.11
|
||||
image: python:3.12
|
||||
variables:
|
||||
PRE_COMMIT_HOME: ${CI_PROJECT_DIR}/.cache/pre-commit
|
||||
cache:
|
||||
|
@ -27,22 +27,19 @@ precommit:
|
|||
pytest:
|
||||
stage: test
|
||||
{%- if cookiecutter.use_docker == 'y' %}
|
||||
image: docker/compose:1.29.2
|
||||
tags:
|
||||
- docker
|
||||
image: docker:25.0
|
||||
services:
|
||||
- docker:dind
|
||||
before_script:
|
||||
- docker compose -f local.yml build
|
||||
- docker compose -f docker-compose.local.yml build
|
||||
- docker compose -f docker-compose.docs.yml build
|
||||
# Ensure celerybeat does not crash due to non-existent tables
|
||||
- docker compose -f local.yml run --rm django python manage.py migrate
|
||||
- docker compose -f local.yml up -d
|
||||
- docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
|
||||
- docker compose -f docker-compose.local.yml up -d
|
||||
script:
|
||||
- docker compose -f local.yml run django pytest
|
||||
- docker compose -f docker-compose.local.yml run django pytest
|
||||
{%- else %}
|
||||
image: python:3.11
|
||||
tags:
|
||||
- python
|
||||
image: python:3.12
|
||||
services:
|
||||
- postgres:{{ cookiecutter.postgresql_version }}
|
||||
variables:
|
||||
|
|
|
@ -15,7 +15,7 @@
|
|||
{%- endif %}
|
||||
</list>
|
||||
</option>
|
||||
<option name="sourceFilePath" value="local.yml"/>
|
||||
<option name="sourceFilePath" value="docker-compose.local.yml"/>
|
||||
</settings>
|
||||
</deployment>
|
||||
<method v="2"/>
|
||||
|
|
|
@ -8,7 +8,7 @@
|
|||
<option value="docs"/>
|
||||
</list>
|
||||
</option>
|
||||
<option name="sourceFilePath" value="local.yml"/>
|
||||
<option name="sourceFilePath" value="docker-compose.local.yml"/>
|
||||
</settings>
|
||||
</deployment>
|
||||
<method v="2"/>
|
||||
|
|
|
@ -1,12 +1,13 @@
|
|||
exclude: '^docs/|/migrations/|devcontainer.json'
|
||||
default_stages: [commit]
|
||||
default_stages: [pre-commit]
|
||||
minimum_pre_commit_version: "3.2.0"
|
||||
|
||||
default_language_version:
|
||||
python: python3.11
|
||||
python: python3.12
|
||||
|
||||
repos:
|
||||
- repo: https://github.com/pre-commit/pre-commit-hooks
|
||||
rev: v4.5.0
|
||||
rev: v5.0.0
|
||||
hooks:
|
||||
- id: trailing-whitespace
|
||||
- id: end-of-file-fixer
|
||||
|
@ -28,34 +29,23 @@ repos:
|
|||
exclude: '{{cookiecutter.project_slug}}/templates/'
|
||||
|
||||
- repo: https://github.com/adamchainz/django-upgrade
|
||||
rev: '1.15.0'
|
||||
rev: '1.24.0'
|
||||
hooks:
|
||||
- id: django-upgrade
|
||||
args: ['--target-version', '4.2']
|
||||
args: ['--target-version', '5.0']
|
||||
|
||||
- repo: https://github.com/asottile/pyupgrade
|
||||
rev: v3.15.0
|
||||
# Run the Ruff linter.
|
||||
- repo: https://github.com/astral-sh/ruff-pre-commit
|
||||
rev: v0.11.4
|
||||
hooks:
|
||||
- id: pyupgrade
|
||||
args: [--py311-plus]
|
||||
|
||||
- repo: https://github.com/psf/black
|
||||
rev: 23.12.1
|
||||
hooks:
|
||||
- id: black
|
||||
|
||||
- repo: https://github.com/PyCQA/isort
|
||||
rev: 5.13.2
|
||||
hooks:
|
||||
- id: isort
|
||||
|
||||
- repo: https://github.com/PyCQA/flake8
|
||||
rev: 7.0.0
|
||||
hooks:
|
||||
- id: flake8
|
||||
# Linter
|
||||
- id: ruff
|
||||
args: [--fix, --exit-non-zero-on-fix]
|
||||
# Formatter
|
||||
- id: ruff-format
|
||||
|
||||
- repo: https://github.com/Riverside-Healthcare/djLint
|
||||
rev: v1.34.1
|
||||
rev: v1.36.4
|
||||
hooks:
|
||||
- id: djlint-reformat-django
|
||||
- id: djlint-django
|
||||
|
|
1
{{cookiecutter.project_slug}}/.python-version
Normal file
1
{{cookiecutter.project_slug}}/.python-version
Normal file
|
@ -0,0 +1 @@
|
|||
3.12
|
|
@ -8,7 +8,7 @@ version: 2
|
|||
build:
|
||||
os: ubuntu-22.04
|
||||
tools:
|
||||
python: '3.11'
|
||||
python: '3.12'
|
||||
|
||||
# Build documentation in the docs/ directory with Sphinx
|
||||
sphinx:
|
||||
|
|
|
@ -2,7 +2,7 @@ dist: focal
|
|||
|
||||
language: python
|
||||
python:
|
||||
- "3.11"
|
||||
- "3.12"
|
||||
|
||||
services:
|
||||
- {% if cookiecutter.use_docker == 'y' %}docker{% else %}postgresql{% endif %}
|
||||
|
@ -10,23 +10,24 @@ jobs:
|
|||
include:
|
||||
- name: "Linter"
|
||||
before_script:
|
||||
- pip install -q flake8
|
||||
- pip install -q ruff
|
||||
script:
|
||||
- "flake8"
|
||||
- ruff check .
|
||||
|
||||
- name: "Django Test"
|
||||
{%- if cookiecutter.use_docker == 'y' %}
|
||||
before_script:
|
||||
- docker compose -v
|
||||
- docker -v
|
||||
- docker compose -f local.yml build
|
||||
- docker compose -f docker-compose.local.yml build
|
||||
- docker compose -f docker-compose.docs.yml build
|
||||
# Ensure celerybeat does not crash due to non-existent tables
|
||||
- docker compose -f local.yml run --rm django python manage.py migrate
|
||||
- docker compose -f local.yml up -d
|
||||
- docker compose -f docker-compose.local.yml run --rm django python manage.py migrate
|
||||
- docker compose -f docker-compose.local.yml up -d
|
||||
script:
|
||||
- "docker compose -f local.yml run django pytest"
|
||||
- docker compose -f docker-compose.local.yml run django pytest
|
||||
after_failure:
|
||||
- docker compose -f local.yml logs
|
||||
- docker compose -f docker-compose.local.yml logs
|
||||
{%- else %}
|
||||
before_install:
|
||||
- sudo apt-get update -qq
|
||||
|
@ -37,9 +38,9 @@ jobs:
|
|||
- sudo apt-get install -qq libsqlite3-dev libxml2 libxml2-dev libssl-dev libbz2-dev wget curl llvm
|
||||
language: python
|
||||
python:
|
||||
- "3.11"
|
||||
- "3.12"
|
||||
install:
|
||||
- pip install -r requirements/local.txt
|
||||
script:
|
||||
- "pytest"
|
||||
- pytest
|
||||
{%- endif %}
|
||||
|
|
|
@ -1,6 +1,6 @@
|
|||
release: python manage.py migrate
|
||||
{%- if cookiecutter.use_async == "y" %}
|
||||
web: gunicorn config.asgi:application -k uvicorn.workers.UvicornWorker
|
||||
web: gunicorn config.asgi:application -k uvicorn_worker.UvicornWorker
|
||||
{%- else %}
|
||||
web: gunicorn config.wsgi:application
|
||||
{%- endif %}
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
{{ cookiecutter.description }}
|
||||
|
||||
[](https://github.com/cookiecutter/cookiecutter-django/)
|
||||
[](https://github.com/ambv/black)
|
||||
[](https://github.com/astral-sh/ruff)
|
||||
|
||||
{%- if cookiecutter.open_source_license != "Not open source" %}
|
||||
|
||||
|
@ -12,7 +12,7 @@ License: {{cookiecutter.open_source_license}}
|
|||
|
||||
## Settings
|
||||
|
||||
Moved to [settings](http://cookiecutter-django.readthedocs.io/en/latest/settings.html).
|
||||
Moved to [settings](https://cookiecutter-django.readthedocs.io/en/latest/1-getting-started/settings.html).
|
||||
|
||||
## Basic Commands
|
||||
|
||||
|
@ -46,7 +46,7 @@ To run the tests, check your test coverage, and generate an HTML coverage report
|
|||
|
||||
### Live reloading and Sass CSS compilation
|
||||
|
||||
Moved to [Live reloading and SASS compilation](https://cookiecutter-django.readthedocs.io/en/latest/developing-locally.html#sass-compilation-live-reloading).
|
||||
Moved to [Live reloading and SASS compilation](https://cookiecutter-django.readthedocs.io/en/latest/2-local-development/developing-locally.html#using-webpack-or-gulp).
|
||||
|
||||
{%- if cookiecutter.use_celery == "y" %}
|
||||
|
||||
|
@ -87,7 +87,7 @@ celery -A config.celery_app worker -B -l info
|
|||
In development, it is often nice to be able to see emails that are being sent from your application. For that reason local SMTP server [Mailpit](https://github.com/axllent/mailpit) with a web interface is available as docker container.
|
||||
|
||||
Container mailpit will start automatically when you will run all docker containers.
|
||||
Please check [cookiecutter-django Docker documentation](http://cookiecutter-django.readthedocs.io/en/latest/deployment-with-docker.html) for more details how to start all containers.
|
||||
Please check [cookiecutter-django Docker documentation](https://cookiecutter-django.readthedocs.io/en/latest/2-local-development/developing-locally-docker.html) for more details how to start all containers.
|
||||
|
||||
With Mailpit running, to view messages that are sent by your application, open your browser and go to `http://127.0.0.1:8025`
|
||||
{%- else %}
|
||||
|
@ -130,14 +130,14 @@ The following details how to deploy this application.
|
|||
|
||||
### Heroku
|
||||
|
||||
See detailed [cookiecutter-django Heroku documentation](http://cookiecutter-django.readthedocs.io/en/latest/deployment-on-heroku.html).
|
||||
See detailed [cookiecutter-django Heroku documentation](https://cookiecutter-django.readthedocs.io/en/latest/3-deployment/deployment-on-heroku.html).
|
||||
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.use_docker.lower() == "y" %}
|
||||
|
||||
### Docker
|
||||
|
||||
See detailed [cookiecutter-django Docker documentation](http://cookiecutter-django.readthedocs.io/en/latest/deployment-with-docker.html).
|
||||
See detailed [cookiecutter-django Docker documentation](https://cookiecutter-django.readthedocs.io/en/latest/3-deployment/deployment-with-docker.html).
|
||||
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] %}
|
||||
|
|
|
@ -1,8 +1,8 @@
|
|||
# define an alias for the specific python version used in this file.
|
||||
FROM python:3.11.7-slim-bookworm as python
|
||||
FROM docker.io/python:3.12.9-slim-bookworm AS python
|
||||
|
||||
# Python build stage
|
||||
FROM python as python-build-stage
|
||||
FROM python AS python-build-stage
|
||||
|
||||
ARG BUILD_ENVIRONMENT=local
|
||||
|
||||
|
@ -10,7 +10,7 @@ ARG BUILD_ENVIRONMENT=local
|
|||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
# dependencies for building Python packages
|
||||
build-essential \
|
||||
# psycopg2 dependencies
|
||||
# psycopg dependencies
|
||||
libpq-dev
|
||||
|
||||
# Requirements are installed here to ensure they will be cached.
|
||||
|
@ -22,14 +22,14 @@ RUN pip wheel --wheel-dir /usr/src/app/wheels \
|
|||
|
||||
|
||||
# Python 'run' stage
|
||||
FROM python as python-run-stage
|
||||
FROM python AS python-run-stage
|
||||
|
||||
ARG BUILD_ENVIRONMENT=local
|
||||
ARG APP_HOME=/app
|
||||
|
||||
ENV PYTHONUNBUFFERED 1
|
||||
ENV PYTHONDONTWRITEBYTECODE 1
|
||||
ENV BUILD_ENV ${BUILD_ENVIRONMENT}
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV BUILD_ENV=${BUILD_ENVIRONMENT}
|
||||
|
||||
WORKDIR ${APP_HOME}
|
||||
|
||||
|
@ -47,8 +47,9 @@ RUN groupadd --gid 1000 dev-user \
|
|||
|
||||
# Install required system dependencies
|
||||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
# psycopg2 dependencies
|
||||
libpq-dev \
|
||||
# psycopg dependencies
|
||||
libpq-dev \
|
||||
wait-for-it \
|
||||
# Translations dependencies
|
||||
gettext \
|
||||
# cleaning up unused files
|
||||
|
|
|
@ -3,6 +3,14 @@
|
|||
set -o errexit
|
||||
set -o nounset
|
||||
|
||||
|
||||
until timeout 10 celery -A config.celery_app inspect ping; do
|
||||
>&2 echo "Celery workers not available"
|
||||
done
|
||||
|
||||
echo 'Starting flower'
|
||||
|
||||
|
||||
exec watchfiles --filter python celery.__main__.main \
|
||||
--args \
|
||||
"-A config.celery_app -b \"${CELERY_BROKER_URL}\" flower --basic_auth=\"${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}\""
|
||||
"-A config.celery_app -b \"${REDIS_URL}\" flower --basic_auth=\"${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}\""
|
||||
|
|
|
@ -1,16 +1,16 @@
|
|||
# define an alias for the specific python version used in this file.
|
||||
FROM python:3.11.7-slim-bookworm as python
|
||||
FROM docker.io/python:3.12.9-slim-bookworm AS python
|
||||
|
||||
|
||||
# Python build stage
|
||||
FROM python as python-build-stage
|
||||
FROM python AS python-build-stage
|
||||
|
||||
ENV PYTHONDONTWRITEBYTECODE 1
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
|
||||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
# dependencies for building Python packages
|
||||
build-essential \
|
||||
# psycopg2 dependencies
|
||||
# psycopg dependencies
|
||||
libpq-dev \
|
||||
# cleaning up unused files
|
||||
&& apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
|
||||
|
@ -26,16 +26,16 @@ RUN pip wheel --no-cache-dir --wheel-dir /usr/src/app/wheels \
|
|||
|
||||
|
||||
# Python 'run' stage
|
||||
FROM python as python-run-stage
|
||||
FROM python AS python-run-stage
|
||||
|
||||
ARG BUILD_ENVIRONMENT
|
||||
ENV PYTHONUNBUFFERED 1
|
||||
ENV PYTHONDONTWRITEBYTECODE 1
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
|
||||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
# To run the Makefile
|
||||
make \
|
||||
# psycopg2 dependencies
|
||||
# psycopg dependencies
|
||||
libpq-dev \
|
||||
# Translations dependencies
|
||||
gettext \
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
FROM node:20-bookworm-slim
|
||||
FROM docker.io/node:22.14-bookworm-slim
|
||||
|
||||
WORKDIR /app
|
||||
|
||||
|
@ -6,4 +6,4 @@ COPY ./package.json /app
|
|||
|
||||
RUN npm install && npm cache clean --force
|
||||
|
||||
ENV PATH ./node_modules/.bin/:$PATH
|
||||
ENV PATH=./node_modules/.bin/:$PATH
|
||||
|
|
|
@ -1,4 +1,8 @@
|
|||
FROM garland/aws-cli-docker:1.16.140
|
||||
FROM docker.io/amazon/aws-cli:2.25.0
|
||||
|
||||
# Clear entrypoint from the base image, otherwise it's always calling the aws CLI
|
||||
ENTRYPOINT []
|
||||
CMD ["/bin/bash"]
|
||||
|
||||
COPY ./compose/production/aws/maintenance /usr/local/bin/maintenance
|
||||
COPY ./compose/production/postgres/maintenance/_sourced /usr/local/bin/maintenance/_sourced
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
### Download a file from your Amazon S3 bucket to the postgres /backups folder
|
||||
###
|
||||
### Usage:
|
||||
### $ docker compose -f production.yml run --rm awscli <1>
|
||||
### $ docker compose -f docker-compose.production.yml run --rm awscli <1>
|
||||
|
||||
set -o errexit
|
||||
set -o pipefail
|
||||
|
|
|
@ -3,7 +3,7 @@
|
|||
### Upload the /backups folder to Amazon S3
|
||||
###
|
||||
### Usage:
|
||||
### $ docker compose -f production.yml run --rm awscli upload
|
||||
### $ docker compose -f docker-compose.production.yml run --rm awscli upload
|
||||
|
||||
set -o errexit
|
||||
set -o pipefail
|
||||
|
|
|
@ -1,5 +1,5 @@
|
|||
{% if cookiecutter.frontend_pipeline in ['Gulp', 'Webpack'] -%}
|
||||
FROM node:20-bookworm-slim as client-builder
|
||||
FROM docker.io/node:22.14-bookworm-slim AS client-builder
|
||||
|
||||
ARG APP_HOME=/app
|
||||
WORKDIR ${APP_HOME}
|
||||
|
@ -25,10 +25,10 @@ RUN npm run build
|
|||
|
||||
{%- endif %}
|
||||
# define an alias for the specific python version used in this file.
|
||||
FROM python:3.11.7-slim-bookworm as python
|
||||
FROM docker.io/python:3.12.9-slim-bookworm AS python
|
||||
|
||||
# Python build stage
|
||||
FROM python as python-build-stage
|
||||
FROM python AS python-build-stage
|
||||
|
||||
ARG BUILD_ENVIRONMENT=production
|
||||
|
||||
|
@ -36,9 +36,10 @@ ARG BUILD_ENVIRONMENT=production
|
|||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
# dependencies for building Python packages
|
||||
build-essential \
|
||||
# psycopg2 dependencies
|
||||
# psycopg dependencies
|
||||
libpq-dev
|
||||
|
||||
|
||||
# Requirements are installed here to ensure they will be cached.
|
||||
COPY ./requirements .
|
||||
|
||||
|
@ -48,27 +49,29 @@ RUN pip wheel --wheel-dir /usr/src/app/wheels \
|
|||
|
||||
|
||||
# Python 'run' stage
|
||||
FROM python as python-run-stage
|
||||
FROM python AS python-run-stage
|
||||
|
||||
ARG BUILD_ENVIRONMENT=production
|
||||
ARG APP_HOME=/app
|
||||
|
||||
ENV PYTHONUNBUFFERED 1
|
||||
ENV PYTHONDONTWRITEBYTECODE 1
|
||||
ENV BUILD_ENV ${BUILD_ENVIRONMENT}
|
||||
ENV PYTHONUNBUFFERED=1
|
||||
ENV PYTHONDONTWRITEBYTECODE=1
|
||||
ENV BUILD_ENV=${BUILD_ENVIRONMENT}
|
||||
|
||||
WORKDIR ${APP_HOME}
|
||||
|
||||
RUN addgroup --system django \
|
||||
&& adduser --system --ingroup django django
|
||||
&& adduser --system --ingroup django django
|
||||
|
||||
|
||||
# Install required system dependencies
|
||||
RUN apt-get update && apt-get install --no-install-recommends -y \
|
||||
# psycopg2 dependencies
|
||||
# psycopg dependencies
|
||||
libpq-dev \
|
||||
# Translations dependencies
|
||||
gettext \
|
||||
# entrypoint
|
||||
wait-for-it \
|
||||
# cleaning up unused files
|
||||
&& apt-get purge -y --auto-remove -o APT::AutoRemove::RecommendsImportant=false \
|
||||
&& rm -rf /var/lib/apt/lists/*
|
||||
|
@ -117,14 +120,11 @@ COPY --chown=django:django . ${APP_HOME}
|
|||
{%- endif %}
|
||||
|
||||
# make django owner of the WORKDIR directory as well.
|
||||
RUN chown django:django ${APP_HOME}
|
||||
RUN chown -R django:django ${APP_HOME}
|
||||
|
||||
USER django
|
||||
|
||||
RUN DATABASE_URL="" \
|
||||
{%- if cookiecutter.use_celery == "y" %}
|
||||
CELERY_BROKER_URL="" \
|
||||
{%- endif %}
|
||||
DJANGO_SETTINGS_MODULE="config.settings.test" \
|
||||
python manage.py compilemessages
|
||||
|
||||
|
|
|
@ -4,8 +4,16 @@ set -o errexit
|
|||
set -o nounset
|
||||
|
||||
|
||||
|
||||
until timeout 10 celery -A config.celery_app inspect ping; do
|
||||
>&2 echo "Celery workers not available"
|
||||
done
|
||||
|
||||
echo 'Starting flower'
|
||||
|
||||
|
||||
exec celery \
|
||||
-A config.celery_app \
|
||||
-b "${CELERY_BROKER_URL}" \
|
||||
-b "${REDIS_URL}" \
|
||||
flower \
|
||||
--basic_auth="${CELERY_FLOWER_USER}:${CELERY_FLOWER_PASSWORD}"
|
||||
|
|
|
@ -4,45 +4,13 @@ set -o errexit
|
|||
set -o pipefail
|
||||
set -o nounset
|
||||
|
||||
|
||||
{% if cookiecutter.use_celery == 'y' %}
|
||||
# N.B. If only .env files supported variable expansion...
|
||||
export CELERY_BROKER_URL="${REDIS_URL}"
|
||||
{% endif %}
|
||||
|
||||
if [ -z "${POSTGRES_USER}" ]; then
|
||||
base_postgres_image_default_user='postgres'
|
||||
export POSTGRES_USER="${base_postgres_image_default_user}"
|
||||
fi
|
||||
export DATABASE_URL="postgres://${POSTGRES_USER}:${POSTGRES_PASSWORD}@${POSTGRES_HOST}:${POSTGRES_PORT}/${POSTGRES_DB}"
|
||||
|
||||
python << END
|
||||
import sys
|
||||
import time
|
||||
|
||||
import psycopg
|
||||
|
||||
suggest_unrecoverable_after = 30
|
||||
start = time.time()
|
||||
|
||||
while True:
|
||||
try:
|
||||
psycopg.connect(
|
||||
dbname="${POSTGRES_DB}",
|
||||
user="${POSTGRES_USER}",
|
||||
password="${POSTGRES_PASSWORD}",
|
||||
host="${POSTGRES_HOST}",
|
||||
port="${POSTGRES_PORT}",
|
||||
)
|
||||
break
|
||||
except psycopg.OperationalError as error:
|
||||
sys.stderr.write("Waiting for PostgreSQL to become available...\n")
|
||||
|
||||
if time.time() - start > suggest_unrecoverable_after:
|
||||
sys.stderr.write(" This is taking longer than expected. The following exception may be indicative of an unrecoverable error: '{}'\n".format(error))
|
||||
|
||||
time.sleep(1)
|
||||
END
|
||||
wait-for-it "${POSTGRES_HOST}:${POSTGRES_PORT}" -t 30
|
||||
|
||||
>&2 echo 'PostgreSQL is available'
|
||||
|
||||
|
|
|
@ -28,7 +28,7 @@ if compress_enabled; then
|
|||
fi
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.use_async == 'y' %}
|
||||
exec /usr/local/bin/gunicorn config.asgi --bind 0.0.0.0:5000 --chdir=/app -k uvicorn.workers.UvicornWorker
|
||||
exec /usr/local/bin/gunicorn config.asgi --bind 0.0.0.0:5000 --chdir=/app -k uvicorn_worker.UvicornWorker
|
||||
{%- else %}
|
||||
exec /usr/local/bin/gunicorn config.wsgi --bind 0.0.0.0:5000 --chdir=/app
|
||||
{%- endif %}
|
||||
|
|
|
@ -1,2 +1,2 @@
|
|||
FROM nginx:1.17.8-alpine
|
||||
FROM docker.io/nginx:1.17.8-alpine
|
||||
COPY ./compose/production/nginx/default.conf /etc/nginx/conf.d/default.conf
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
FROM postgres:{{ cookiecutter.postgresql_version }}
|
||||
FROM docker.io/postgres:{{ cookiecutter.postgresql_version }}
|
||||
|
||||
COPY ./compose/production/postgres/maintenance /usr/local/bin/maintenance
|
||||
RUN chmod +x /usr/local/bin/maintenance/*
|
||||
|
|
|
@ -1,4 +1,4 @@
|
|||
FROM traefik:2.10.7
|
||||
FROM docker.io/traefik:3.3.5
|
||||
RUN mkdir -p /etc/traefik/acme \
|
||||
&& touch /etc/traefik/acme/acme.json \
|
||||
&& chmod 600 /etc/traefik/acme/acme.json
|
||||
|
|
|
@ -6,7 +6,7 @@ entryPoints:
|
|||
# http
|
||||
address: ':80'
|
||||
http:
|
||||
# https://docs.traefik.io/routing/entrypoints/#entrypoint
|
||||
# https://doc.traefik.io/traefik/routing/entrypoints/#entrypoint
|
||||
redirections:
|
||||
entryPoint:
|
||||
to: web-secure
|
||||
|
@ -22,11 +22,11 @@ entryPoints:
|
|||
|
||||
certificatesResolvers:
|
||||
letsencrypt:
|
||||
# https://docs.traefik.io/master/https/acme/#lets-encrypt
|
||||
# https://doc.traefik.io/traefik/https/acme/#lets-encrypt
|
||||
acme:
|
||||
email: '{{ cookiecutter.email }}'
|
||||
storage: /etc/traefik/acme/acme.json
|
||||
# https://docs.traefik.io/master/https/acme/#httpchallenge
|
||||
# https://doc.traefik.io/traefik/https/acme/#httpchallenge
|
||||
httpChallenge:
|
||||
entryPoint: web
|
||||
|
||||
|
@ -44,7 +44,7 @@ http:
|
|||
- csrf
|
||||
service: django
|
||||
tls:
|
||||
# https://docs.traefik.io/master/routing/routers/#certresolver
|
||||
# https://doc.traefik.io/traefik/routing/routers/#certresolver
|
||||
certResolver: letsencrypt
|
||||
{%- if cookiecutter.use_celery == 'y' %}
|
||||
|
||||
|
@ -54,7 +54,7 @@ http:
|
|||
- flower
|
||||
service: flower
|
||||
tls:
|
||||
# https://docs.traefik.io/master/routing/routers/#certresolver
|
||||
# https://doc.traefik.io/traefik/master/routing/routers/#certresolver
|
||||
certResolver: letsencrypt
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.cloud_provider == 'None' %}
|
||||
|
@ -76,7 +76,7 @@ http:
|
|||
|
||||
middlewares:
|
||||
csrf:
|
||||
# https://docs.traefik.io/master/middlewares/headers/#hostsproxyheaders
|
||||
# https://doc.traefik.io/traefik/master/middlewares/http/headers/#hostsproxyheaders
|
||||
# https://docs.djangoproject.com/en/dev/ref/csrf/#ajax
|
||||
headers:
|
||||
hostsProxyHeaders: ['X-CSRFToken']
|
||||
|
@ -102,7 +102,7 @@ http:
|
|||
{%- endif %}
|
||||
|
||||
providers:
|
||||
# https://docs.traefik.io/master/providers/file/
|
||||
# https://doc.traefik.io/traefik/master/providers/file/
|
||||
file:
|
||||
filename: /etc/traefik/traefik.yml
|
||||
watch: true
|
||||
|
|
|
@ -1,12 +1,10 @@
|
|||
from django.conf import settings
|
||||
from rest_framework.routers import DefaultRouter, SimpleRouter
|
||||
from rest_framework.routers import DefaultRouter
|
||||
from rest_framework.routers import SimpleRouter
|
||||
|
||||
from {{ cookiecutter.project_slug }}.users.api.views import UserViewSet
|
||||
|
||||
if settings.DEBUG:
|
||||
router = DefaultRouter()
|
||||
else:
|
||||
router = SimpleRouter()
|
||||
router = DefaultRouter() if settings.DEBUG else SimpleRouter()
|
||||
|
||||
router.register("users", UserViewSet)
|
||||
|
||||
|
|
|
@ -7,6 +7,7 @@ For more information on this file, see
|
|||
https://docs.djangoproject.com/en/dev/howto/deployment/asgi/
|
||||
|
||||
"""
|
||||
|
||||
import os
|
||||
import sys
|
||||
from pathlib import Path
|
||||
|
@ -23,12 +24,9 @@ os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
|
|||
|
||||
# This application object is used by any ASGI server configured to use this file.
|
||||
django_application = get_asgi_application()
|
||||
# Apply ASGI middleware here.
|
||||
# from helloworld.asgi import HelloWorldApplication
|
||||
# application = HelloWorldApplication(application)
|
||||
|
||||
# Import websocket application here, so apps from django_application are loaded first
|
||||
from config.websocket import websocket_application # noqa isort:skip
|
||||
from config.websocket import websocket_application # noqa: E402
|
||||
|
||||
|
||||
async def application(scope, receive, send):
|
||||
|
@ -37,4 +35,5 @@ async def application(scope, receive, send):
|
|||
elif scope["type"] == "websocket":
|
||||
await websocket_application(scope, receive, send)
|
||||
else:
|
||||
raise NotImplementedError(f"Unknown scope type {scope['type']}")
|
||||
msg = f"Unknown scope type {scope['type']}"
|
||||
raise NotImplementedError(msg)
|
||||
|
|
|
@ -1,6 +1,7 @@
|
|||
import os
|
||||
|
||||
from celery import Celery
|
||||
from celery.signals import setup_logging
|
||||
|
||||
# set the default Django settings module for the 'celery' program.
|
||||
os.environ.setdefault("DJANGO_SETTINGS_MODULE", "config.settings.local")
|
||||
|
@ -13,5 +14,15 @@ app = Celery("{{cookiecutter.project_slug}}")
|
|||
# should have a `CELERY_` prefix.
|
||||
app.config_from_object("django.conf:settings", namespace="CELERY")
|
||||
|
||||
|
||||
@setup_logging.connect
|
||||
def config_loggers(*args, **kwargs):
|
||||
from logging.config import dictConfig
|
||||
|
||||
from django.conf import settings
|
||||
|
||||
dictConfig(settings.LOGGING)
|
||||
|
||||
|
||||
# Load task modules from all registered Django app configs.
|
||||
app.autodiscover_tasks()
|
||||
|
|
|
@ -1,6 +1,9 @@
|
|||
"""
|
||||
Base settings to build other settings files upon.
|
||||
"""
|
||||
# ruff: noqa: ERA001, E501
|
||||
"""Base settings to build other settings files upon."""
|
||||
|
||||
{% if cookiecutter.use_celery == 'y' -%}
|
||||
import ssl
|
||||
{%- endif %}
|
||||
from pathlib import Path
|
||||
|
||||
import environ
|
||||
|
@ -84,6 +87,7 @@ THIRD_PARTY_APPS = [
|
|||
"crispy_bootstrap5",
|
||||
"allauth",
|
||||
"allauth.account",
|
||||
"allauth.mfa",
|
||||
"allauth.socialaccount",
|
||||
{%- if cookiecutter.use_celery == 'y' %}
|
||||
"django_celery_beat",
|
||||
|
@ -137,7 +141,9 @@ PASSWORD_HASHERS = [
|
|||
]
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
|
||||
AUTH_PASSWORD_VALIDATORS = [
|
||||
{"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"},
|
||||
{
|
||||
"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator",
|
||||
},
|
||||
{"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
|
||||
{"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
|
||||
{"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
|
||||
|
@ -210,7 +216,7 @@ TEMPLATES = [
|
|||
"{{cookiecutter.project_slug}}.users.context_processors.allauth_settings",
|
||||
],
|
||||
},
|
||||
}
|
||||
},
|
||||
]
|
||||
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#form-renderer
|
||||
|
@ -274,11 +280,14 @@ LOGGING = {
|
|||
"level": "DEBUG",
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": "verbose",
|
||||
}
|
||||
},
|
||||
},
|
||||
"root": {"level": "INFO", "handlers": ["console"]},
|
||||
}
|
||||
|
||||
REDIS_URL = env("REDIS_URL", default="redis://{% if cookiecutter.use_docker == 'y' %}redis{%else%}localhost{% endif %}:6379/0")
|
||||
REDIS_SSL = REDIS_URL.startswith("rediss://")
|
||||
|
||||
{% if cookiecutter.use_celery == 'y' -%}
|
||||
# Celery
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -286,9 +295,13 @@ if USE_TZ:
|
|||
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#std:setting-timezone
|
||||
CELERY_TIMEZONE = TIME_ZONE
|
||||
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#std:setting-broker_url
|
||||
CELERY_BROKER_URL = env("CELERY_BROKER_URL")
|
||||
CELERY_BROKER_URL = REDIS_URL
|
||||
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#redis-backend-use-ssl
|
||||
CELERY_BROKER_USE_SSL = {"ssl_cert_reqs": ssl.CERT_NONE} if REDIS_SSL else None
|
||||
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#std:setting-result_backend
|
||||
CELERY_RESULT_BACKEND = CELERY_BROKER_URL
|
||||
CELERY_RESULT_BACKEND = REDIS_URL
|
||||
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#redis-backend-use-ssl
|
||||
CELERY_REDIS_BACKEND_USE_SSL = CELERY_BROKER_USE_SSL
|
||||
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#result-extended
|
||||
CELERY_RESULT_EXTENDED = True
|
||||
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#result-backend-always-retry
|
||||
|
@ -314,18 +327,20 @@ CELERY_BEAT_SCHEDULER = "django_celery_beat.schedulers:DatabaseScheduler"
|
|||
CELERY_WORKER_SEND_TASK_EVENTS = True
|
||||
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#std-setting-task_send_sent_event
|
||||
CELERY_TASK_SEND_SENT_EVENT = True
|
||||
# https://docs.celeryq.dev/en/stable/userguide/configuration.html#worker-hijack-root-logger
|
||||
CELERY_WORKER_HIJACK_ROOT_LOGGER = False
|
||||
|
||||
{%- endif %}
|
||||
# django-allauth
|
||||
# ------------------------------------------------------------------------------
|
||||
ACCOUNT_ALLOW_REGISTRATION = env.bool("DJANGO_ACCOUNT_ALLOW_REGISTRATION", True)
|
||||
# https://docs.allauth.org/en/latest/account/configuration.html
|
||||
ACCOUNT_AUTHENTICATION_METHOD = "{{cookiecutter.username_type}}"
|
||||
ACCOUNT_LOGIN_METHODS = {"{{cookiecutter.username_type}}"}
|
||||
# https://docs.allauth.org/en/latest/account/configuration.html
|
||||
ACCOUNT_EMAIL_REQUIRED = True
|
||||
{%- if cookiecutter.username_type == "email" %}
|
||||
# https://docs.allauth.org/en/latest/account/configuration.html
|
||||
ACCOUNT_USERNAME_REQUIRED = False
|
||||
{%- if cookiecutter.username_type == "username" %}
|
||||
ACCOUNT_SIGNUP_FIELDS = ["email*", "username*", "password1*", "password2*"]
|
||||
{%- else %}
|
||||
ACCOUNT_SIGNUP_FIELDS = ["email*", "password1*", "password2*"]
|
||||
# https://docs.allauth.org/en/latest/account/configuration.html
|
||||
ACCOUNT_USER_MODEL_USERNAME_FIELD = None
|
||||
{%- endif %}
|
||||
|
@ -369,6 +384,7 @@ SPECTACULAR_SETTINGS = {
|
|||
"DESCRIPTION": "Documentation of API endpoints of {{ cookiecutter.project_name }}",
|
||||
"VERSION": "1.0.0",
|
||||
"SERVE_PERMISSIONS": ["rest_framework.permissions.IsAdminUser"],
|
||||
"SCHEMA_PATH_PREFIX": "/api/",
|
||||
}
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.frontend_pipeline == 'Webpack' %}
|
||||
|
@ -380,7 +396,7 @@ WEBPACK_LOADER = {
|
|||
"STATS_FILE": BASE_DIR / "webpack-stats.json",
|
||||
"POLL_INTERVAL": 0.1,
|
||||
"IGNORE": [r".+\.hot-update.js", r".+\.map"],
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
{%- endif %}
|
||||
|
|
|
@ -1,4 +1,10 @@
|
|||
from .base import * # noqa
|
||||
# ruff: noqa: E501
|
||||
from .base import * # noqa: F403
|
||||
from .base import INSTALLED_APPS
|
||||
from .base import MIDDLEWARE
|
||||
{%- if cookiecutter.frontend_pipeline == 'Webpack' %}
|
||||
from .base import WEBPACK_LOADER
|
||||
{%- endif %}
|
||||
from .base import env
|
||||
|
||||
# GENERAL
|
||||
|
@ -11,7 +17,7 @@ SECRET_KEY = env(
|
|||
default="!!!SET DJANGO_SECRET_KEY!!!",
|
||||
)
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#allowed-hosts
|
||||
ALLOWED_HOSTS = ["localhost", "0.0.0.0", "127.0.0.1"]
|
||||
ALLOWED_HOSTS = ["localhost", "0.0.0.0", "127.0.0.1"] # noqa: S104
|
||||
|
||||
# CACHES
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -20,7 +26,7 @@ CACHES = {
|
|||
"default": {
|
||||
"BACKEND": "django.core.cache.backends.locmem.LocMemCache",
|
||||
"LOCATION": "",
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
# EMAIL
|
||||
|
@ -37,7 +43,9 @@ EMAIL_HOST = "localhost"
|
|||
EMAIL_PORT = 1025
|
||||
{%- else -%}
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
|
||||
EMAIL_BACKEND = env("DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.console.EmailBackend")
|
||||
EMAIL_BACKEND = env(
|
||||
"DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.console.EmailBackend",
|
||||
)
|
||||
{%- endif %}
|
||||
|
||||
{%- if cookiecutter.use_whitenoise == 'y' %}
|
||||
|
@ -45,18 +53,23 @@ EMAIL_BACKEND = env("DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.c
|
|||
# WhiteNoise
|
||||
# ------------------------------------------------------------------------------
|
||||
# http://whitenoise.evans.io/en/latest/django.html#using-whitenoise-in-development
|
||||
INSTALLED_APPS = ["whitenoise.runserver_nostatic"] + INSTALLED_APPS # noqa: F405
|
||||
INSTALLED_APPS = ["whitenoise.runserver_nostatic", *INSTALLED_APPS]
|
||||
{% endif %}
|
||||
|
||||
# django-debug-toolbar
|
||||
# ------------------------------------------------------------------------------
|
||||
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#prerequisites
|
||||
INSTALLED_APPS += ["debug_toolbar"] # noqa: F405
|
||||
INSTALLED_APPS += ["debug_toolbar"]
|
||||
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#middleware
|
||||
MIDDLEWARE += ["debug_toolbar.middleware.DebugToolbarMiddleware"] # noqa: F405
|
||||
MIDDLEWARE += ["debug_toolbar.middleware.DebugToolbarMiddleware"]
|
||||
# https://django-debug-toolbar.readthedocs.io/en/latest/configuration.html#debug-toolbar-config
|
||||
DEBUG_TOOLBAR_CONFIG = {
|
||||
"DISABLE_PANELS": ["debug_toolbar.panels.redirects.RedirectsPanel"],
|
||||
"DISABLE_PANELS": [
|
||||
"debug_toolbar.panels.redirects.RedirectsPanel",
|
||||
# Disable profiling panel due to an issue with Python 3.12:
|
||||
# https://github.com/jazzband/django-debug-toolbar/issues/1875
|
||||
"debug_toolbar.panels.profiling.ProfilingPanel",
|
||||
],
|
||||
"SHOW_TEMPLATE_CONTEXT": True,
|
||||
}
|
||||
# https://django-debug-toolbar.readthedocs.io/en/latest/installation.html#internal-ips
|
||||
|
@ -75,12 +88,21 @@ if env("USE_DOCKER") == "yes":
|
|||
# The node container isn't started (yet?)
|
||||
pass
|
||||
{%- endif %}
|
||||
{%- if cookiecutter.windows == 'y' %}
|
||||
# RunServerPlus
|
||||
# ------------------------------------------------------------------------------
|
||||
# This is a custom setting for RunServerPlus to fix reloader issue in Windows docker environment
|
||||
# Werkzeug reloader type [auto, watchdog, or stat]
|
||||
RUNSERVERPLUS_POLLER_RELOADER_TYPE = 'stat'
|
||||
# If you have CPU and IO load issues, you can increase this poller interval e.g) 5
|
||||
RUNSERVERPLUS_POLLER_RELOADER_INTERVAL = 1
|
||||
{%- endif %}
|
||||
{%- endif %}
|
||||
|
||||
# django-extensions
|
||||
# ------------------------------------------------------------------------------
|
||||
# https://django-extensions.readthedocs.io/en/latest/installation_instructions.html#configuration
|
||||
INSTALLED_APPS += ["django_extensions"] # noqa: F405
|
||||
INSTALLED_APPS += ["django_extensions"]
|
||||
{% if cookiecutter.use_celery == 'y' -%}
|
||||
|
||||
# Celery
|
||||
|
@ -96,7 +118,7 @@ CELERY_TASK_EAGER_PROPAGATES = True
|
|||
{%- if cookiecutter.frontend_pipeline == 'Webpack' %}
|
||||
# django-webpack-loader
|
||||
# ------------------------------------------------------------------------------
|
||||
WEBPACK_LOADER["DEFAULT"]["CACHE"] = not DEBUG # noqa: F405
|
||||
WEBPACK_LOADER["DEFAULT"]["CACHE"] = not DEBUG
|
||||
|
||||
{%- endif %}
|
||||
# Your stuff...
|
||||
|
|
|
@ -1,3 +1,4 @@
|
|||
# ruff: noqa: E501
|
||||
{% if cookiecutter.use_sentry == 'y' -%}
|
||||
import logging
|
||||
|
||||
|
@ -12,7 +13,13 @@ from sentry_sdk.integrations.logging import LoggingIntegration
|
|||
from sentry_sdk.integrations.redis import RedisIntegration
|
||||
|
||||
{% endif -%}
|
||||
from .base import * # noqa
|
||||
from .base import * # noqa: F403
|
||||
from .base import DATABASES
|
||||
from .base import INSTALLED_APPS
|
||||
from .base import REDIS_URL
|
||||
{%- if cookiecutter.use_drf == "y" %}
|
||||
from .base import SPECTACULAR_SETTINGS
|
||||
{%- endif %}
|
||||
from .base import env
|
||||
|
||||
# GENERAL
|
||||
|
@ -24,21 +31,21 @@ ALLOWED_HOSTS = env.list("DJANGO_ALLOWED_HOSTS", default=["{{ cookiecutter.domai
|
|||
|
||||
# DATABASES
|
||||
# ------------------------------------------------------------------------------
|
||||
DATABASES["default"]["CONN_MAX_AGE"] = env.int("CONN_MAX_AGE", default=60) # noqa: F405
|
||||
DATABASES["default"]["CONN_MAX_AGE"] = env.int("CONN_MAX_AGE", default=60)
|
||||
|
||||
# CACHES
|
||||
# ------------------------------------------------------------------------------
|
||||
CACHES = {
|
||||
"default": {
|
||||
"BACKEND": "django_redis.cache.RedisCache",
|
||||
"LOCATION": env("REDIS_URL"),
|
||||
"LOCATION": REDIS_URL,
|
||||
"OPTIONS": {
|
||||
"CLIENT_CLASS": "django_redis.client.DefaultClient",
|
||||
# Mimicing memcache behavior.
|
||||
# Mimicking memcache behavior.
|
||||
# https://github.com/jazzband/django-redis#memcached-exceptions-behavior
|
||||
"IGNORE_EXCEPTIONS": True,
|
||||
},
|
||||
}
|
||||
},
|
||||
}
|
||||
|
||||
# SECURITY
|
||||
|
@ -49,25 +56,29 @@ SECURE_PROXY_SSL_HEADER = ("HTTP_X_FORWARDED_PROTO", "https")
|
|||
SECURE_SSL_REDIRECT = env.bool("DJANGO_SECURE_SSL_REDIRECT", default=True)
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-secure
|
||||
SESSION_COOKIE_SECURE = True
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#session-cookie-name
|
||||
SESSION_COOKIE_NAME = "__Secure-sessionid"
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-secure
|
||||
CSRF_COOKIE_SECURE = True
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#csrf-cookie-name
|
||||
CSRF_COOKIE_NAME = "__Secure-csrftoken"
|
||||
# https://docs.djangoproject.com/en/dev/topics/security/#ssl-https
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-seconds
|
||||
# TODO: set this to 60 seconds first and then to 518400 once you prove the former works
|
||||
SECURE_HSTS_SECONDS = 60
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-include-subdomains
|
||||
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool("DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True)
|
||||
SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
|
||||
"DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS",
|
||||
default=True,
|
||||
)
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-preload
|
||||
SECURE_HSTS_PRELOAD = env.bool("DJANGO_SECURE_HSTS_PRELOAD", default=True)
|
||||
# https://docs.djangoproject.com/en/dev/ref/middleware/#x-content-type-options-nosniff
|
||||
SECURE_CONTENT_TYPE_NOSNIFF = env.bool("DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True)
|
||||
SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
|
||||
"DJANGO_SECURE_CONTENT_TYPE_NOSNIFF",
|
||||
default=True,
|
||||
)
|
||||
|
||||
{% if cookiecutter.cloud_provider != 'None' -%}
|
||||
# STORAGES
|
||||
# ------------------------------------------------------------------------------
|
||||
# https://django-storages.readthedocs.io/en/latest/#installation
|
||||
INSTALLED_APPS += ["storages"] # noqa: F405
|
||||
{%- endif -%}
|
||||
{% if cookiecutter.cloud_provider == 'AWS' %}
|
||||
# https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
|
||||
AWS_ACCESS_KEY_ID = env("DJANGO_AWS_ACCESS_KEY_ID")
|
||||
|
@ -103,35 +114,99 @@ AZURE_CONTAINER = env("DJANGO_AZURE_CONTAINER_NAME")
|
|||
{% endif -%}
|
||||
|
||||
{% if cookiecutter.cloud_provider != 'None' or cookiecutter.use_whitenoise == 'y' -%}
|
||||
# STATIC
|
||||
# STATIC & MEDIA
|
||||
# ------------------------
|
||||
{% endif -%}
|
||||
{% if cookiecutter.use_whitenoise == 'y' -%}
|
||||
STATICFILES_STORAGE = "whitenoise.storage.CompressedManifestStaticFilesStorage"
|
||||
{% elif cookiecutter.cloud_provider == 'AWS' -%}
|
||||
STATICFILES_STORAGE = "{{cookiecutter.project_slug}}.utils.storages.StaticS3Storage"
|
||||
COLLECTFAST_STRATEGY = "collectfast.strategies.boto3.Boto3Strategy"
|
||||
STATIC_URL = f"https://{aws_s3_domain}/static/"
|
||||
{% elif cookiecutter.cloud_provider == 'GCP' -%}
|
||||
STATICFILES_STORAGE = "{{cookiecutter.project_slug}}.utils.storages.StaticGoogleCloudStorage"
|
||||
COLLECTFAST_STRATEGY = "collectfast.strategies.gcloud.GoogleCloudStrategy"
|
||||
STATIC_URL = f"https://storage.googleapis.com/{GS_BUCKET_NAME}/static/"
|
||||
{% elif cookiecutter.cloud_provider == 'Azure' -%}
|
||||
STATICFILES_STORAGE = "{{cookiecutter.project_slug}}.utils.storages.StaticAzureStorage"
|
||||
STATIC_URL = f"https://{AZURE_ACCOUNT_NAME}.blob.core.windows.net/static/"
|
||||
{% endif -%}
|
||||
|
||||
# MEDIA
|
||||
# ------------------------------------------------------------------------------
|
||||
{%- if cookiecutter.cloud_provider == 'AWS' %}
|
||||
DEFAULT_FILE_STORAGE = "{{cookiecutter.project_slug}}.utils.storages.MediaS3Storage"
|
||||
MEDIA_URL = f"https://{aws_s3_domain}/media/"
|
||||
STORAGES = {
|
||||
{%- if cookiecutter.use_whitenoise == 'y' and cookiecutter.cloud_provider == 'None' %}
|
||||
"default": {
|
||||
"BACKEND": "django.core.files.storage.FileSystemStorage",
|
||||
},
|
||||
"staticfiles": {
|
||||
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
|
||||
},
|
||||
{%- elif cookiecutter.cloud_provider == 'AWS' %}
|
||||
"default": {
|
||||
"BACKEND": "storages.backends.s3.S3Storage",
|
||||
"OPTIONS": {
|
||||
"location": "media",
|
||||
"file_overwrite": False,
|
||||
},
|
||||
},
|
||||
{%- if cookiecutter.use_whitenoise == 'y' %}
|
||||
"staticfiles": {
|
||||
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
|
||||
},
|
||||
{%- else %}
|
||||
"staticfiles": {
|
||||
"BACKEND": "storages.backends.s3.S3Storage",
|
||||
"OPTIONS": {
|
||||
"location": "static",
|
||||
"default_acl": "public-read",
|
||||
},
|
||||
},
|
||||
{%- endif %}
|
||||
{%- elif cookiecutter.cloud_provider == 'GCP' %}
|
||||
DEFAULT_FILE_STORAGE = "{{cookiecutter.project_slug}}.utils.storages.MediaGoogleCloudStorage"
|
||||
MEDIA_URL = f"https://storage.googleapis.com/{GS_BUCKET_NAME}/media/"
|
||||
"default": {
|
||||
"BACKEND": "storages.backends.gcloud.GoogleCloudStorage",
|
||||
"OPTIONS": {
|
||||
"location": "media",
|
||||
"file_overwrite": False,
|
||||
},
|
||||
},
|
||||
{%- if cookiecutter.use_whitenoise == 'y' %}
|
||||
"staticfiles": {
|
||||
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
|
||||
},
|
||||
{%- else %}
|
||||
"staticfiles": {
|
||||
"BACKEND": "storages.backends.gcloud.GoogleCloudStorage",
|
||||
"OPTIONS": {
|
||||
"location": "static",
|
||||
"default_acl": "publicRead",
|
||||
},
|
||||
},
|
||||
{%- endif %}
|
||||
{%- elif cookiecutter.cloud_provider == 'Azure' %}
|
||||
"default": {
|
||||
"BACKEND": "storages.backends.azure_storage.AzureStorage",
|
||||
"OPTIONS": {
|
||||
"location": "media",
|
||||
"overwrite_files": False,
|
||||
},
|
||||
},
|
||||
{%- if cookiecutter.use_whitenoise == 'y' %}
|
||||
"staticfiles": {
|
||||
"BACKEND": "whitenoise.storage.CompressedManifestStaticFilesStorage",
|
||||
},
|
||||
{%- else %}
|
||||
"staticfiles": {
|
||||
"BACKEND": "storages.backends.azure_storage.AzureStorage",
|
||||
"OPTIONS": {
|
||||
"location": "static",
|
||||
},
|
||||
},
|
||||
{%- endif %}
|
||||
{%- endif %}
|
||||
}
|
||||
{%- endif %}
|
||||
|
||||
{%- if cookiecutter.cloud_provider == 'AWS' %}
|
||||
MEDIA_URL = f"https://{aws_s3_domain}/media/"
|
||||
{%- if cookiecutter.use_whitenoise == 'n' %}
|
||||
COLLECTFASTA_STRATEGY = "collectfasta.strategies.boto3.Boto3Strategy"
|
||||
STATIC_URL = f"https://{aws_s3_domain}/static/"
|
||||
{%- endif %}
|
||||
{%- elif cookiecutter.cloud_provider == 'GCP' %}
|
||||
MEDIA_URL = f"https://storage.googleapis.com/{GS_BUCKET_NAME}/media/"
|
||||
{%- if cookiecutter.use_whitenoise == 'n' %}
|
||||
COLLECTFASTA_STRATEGY = "collectfasta.strategies.gcloud.GoogleCloudStrategy"
|
||||
STATIC_URL = f"https://storage.googleapis.com/{GS_BUCKET_NAME}/static/"
|
||||
{%- endif %}
|
||||
{%- elif cookiecutter.cloud_provider == 'Azure' %}
|
||||
DEFAULT_FILE_STORAGE = "{{cookiecutter.project_slug}}.utils.storages.MediaAzureStorage"
|
||||
MEDIA_URL = f"https://{AZURE_ACCOUNT_NAME}.blob.core.windows.net/media/"
|
||||
{%- if cookiecutter.use_whitenoise == 'n' %}
|
||||
STATIC_URL = f"https://{AZURE_ACCOUNT_NAME}.blob.core.windows.net/static/"
|
||||
{%- endif %}
|
||||
{%- endif %}
|
||||
|
||||
# EMAIL
|
||||
|
@ -148,6 +223,7 @@ EMAIL_SUBJECT_PREFIX = env(
|
|||
"DJANGO_EMAIL_SUBJECT_PREFIX",
|
||||
default="[{{cookiecutter.project_name}}] ",
|
||||
)
|
||||
ACCOUNT_EMAIL_SUBJECT_PREFIX = EMAIL_SUBJECT_PREFIX
|
||||
|
||||
# ADMIN
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -157,7 +233,7 @@ ADMIN_URL = env("DJANGO_ADMIN_URL")
|
|||
# Anymail
|
||||
# ------------------------------------------------------------------------------
|
||||
# https://anymail.readthedocs.io/en/stable/installation/#installing-anymail
|
||||
INSTALLED_APPS += ["anymail"] # noqa: F405
|
||||
INSTALLED_APPS += ["anymail"]
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
|
||||
# https://anymail.readthedocs.io/en/stable/installation/#anymail-settings-reference
|
||||
{%- if cookiecutter.mail_service == 'Mailgun' %}
|
||||
|
@ -200,12 +276,12 @@ ANYMAIL = {
|
|||
"SENDGRID_API_KEY": env("SENDGRID_API_KEY"),
|
||||
"SENDGRID_API_URL": env("SENDGRID_API_URL", default="https://api.sendgrid.com/v3/"),
|
||||
}
|
||||
{%- elif cookiecutter.mail_service == 'SendinBlue' %}
|
||||
# https://anymail.readthedocs.io/en/stable/esps/sendinblue/
|
||||
EMAIL_BACKEND = "anymail.backends.sendinblue.EmailBackend"
|
||||
{%- elif cookiecutter.mail_service == 'Brevo' %}
|
||||
# https://anymail.readthedocs.io/en/stable/esps/brevo/
|
||||
EMAIL_BACKEND = "anymail.backends.brevo.EmailBackend"
|
||||
ANYMAIL = {
|
||||
"SENDINBLUE_API_KEY": env("SENDINBLUE_API_KEY"),
|
||||
"SENDINBLUE_API_URL": env("SENDINBLUE_API_URL", default="https://api.sendinblue.com/v3/"),
|
||||
"BREVO_API_KEY": env("BREVO_API_KEY"),
|
||||
"BREVO_API_URL": env("BREVO_API_URL", default="https://api.brevo.com/v3/"),
|
||||
}
|
||||
{%- elif cookiecutter.mail_service == 'SparkPost' %}
|
||||
# https://anymail.readthedocs.io/en/stable/esps/sparkpost/
|
||||
|
@ -230,10 +306,11 @@ COMPRESS_ENABLED = env.bool("COMPRESS_ENABLED", default=True)
|
|||
COMPRESS_STORAGE = "compressor.storage.GzipCompressorFileStorage"
|
||||
{%- elif cookiecutter.cloud_provider in ('AWS', 'GCP', 'Azure') and cookiecutter.use_whitenoise == 'n' %}
|
||||
# https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_STORAGE
|
||||
COMPRESS_STORAGE = STATICFILES_STORAGE
|
||||
COMPRESS_STORAGE = STORAGES["staticfiles"]["BACKEND"]
|
||||
{%- endif %}
|
||||
# https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_URL
|
||||
COMPRESS_URL = STATIC_URL{% if cookiecutter.use_whitenoise == 'y' or cookiecutter.cloud_provider == 'None' %} # noqa: F405{% endif %}
|
||||
COMPRESS_URL = STATIC_URL{% if cookiecutter.use_whitenoise == 'y' or cookiecutter.cloud_provider == 'None' %} # noqa: F405
|
||||
{%- endif -%}
|
||||
{%- if cookiecutter.use_whitenoise == 'y' %}
|
||||
# https://django-compressor.readthedocs.io/en/latest/settings/#django.conf.settings.COMPRESS_OFFLINE
|
||||
COMPRESS_OFFLINE = True # Offline compression is required when using Whitenoise
|
||||
|
@ -247,11 +324,11 @@ COMPRESS_FILTERS = {
|
|||
"js": ["compressor.filters.jsmin.JSMinFilter"],
|
||||
}
|
||||
{% endif %}
|
||||
{%- if cookiecutter.use_whitenoise == 'n' -%}
|
||||
# Collectfast
|
||||
{%- if cookiecutter.use_whitenoise == 'n' and cookiecutter.cloud_provider in ('AWS', 'GCP') -%}
|
||||
# Collectfasta
|
||||
# ------------------------------------------------------------------------------
|
||||
# https://github.com/antonagestam/collectfast#installation
|
||||
INSTALLED_APPS = ["collectfast"] + INSTALLED_APPS # noqa: F405
|
||||
# https://github.com/jasongi/collectfasta#installation
|
||||
INSTALLED_APPS = ["collectfasta", *INSTALLED_APPS]
|
||||
{% endif %}
|
||||
# LOGGING
|
||||
# ------------------------------------------------------------------------------
|
||||
|
@ -311,7 +388,7 @@ LOGGING = {
|
|||
"level": "DEBUG",
|
||||
"class": "logging.StreamHandler",
|
||||
"formatter": "verbose",
|
||||
}
|
||||
},
|
||||
},
|
||||
"root": {"level": "INFO", "handlers": ["console"]},
|
||||
"loggers": {
|
||||
|
@ -363,7 +440,7 @@ sentry_sdk.init(
|
|||
# django-rest-framework
|
||||
# -------------------------------------------------------------------------------
|
||||
# Tools that generate code samples can use SERVERS to point to the correct domain
|
||||
SPECTACULAR_SETTINGS["SERVERS"] = [ # noqa: F405
|
||||
SPECTACULAR_SETTINGS["SERVERS"] = [
|
||||
{"url": "https://{{ cookiecutter.domain_name }}", "description": "Production server"},
|
||||
]
|
||||
|
||||
|
|
|
@ -2,7 +2,8 @@
|
|||
With these settings, tests run faster.
|
||||
"""
|
||||
|
||||
from .base import * # noqa
|
||||
from .base import * # noqa: F403
|
||||
from .base import TEMPLATES
|
||||
from .base import env
|
||||
|
||||
# GENERAL
|
||||
|
@ -27,17 +28,17 @@ EMAIL_BACKEND = "django.core.mail.backends.locmem.EmailBackend"
|
|||
|
||||
# DEBUGGING FOR TEMPLATES
|
||||
# ------------------------------------------------------------------------------
|
||||
TEMPLATES[0]["OPTIONS"]["debug"] = True # type: ignore # noqa: F405
|
||||
TEMPLATES[0]["OPTIONS"]["debug"] = True # type: ignore[index]
|
||||
|
||||
# MEDIA
|
||||
# ------------------------------------------------------------------------------
|
||||
# https://docs.djangoproject.com/en/dev/ref/settings/#media-url
|
||||
MEDIA_URL = 'http://media.testserver'
|
||||
MEDIA_URL = "http://media.testserver/"
|
||||
|
||||
{%- if cookiecutter.frontend_pipeline == 'Webpack' %}
|
||||
# django-webpack-loader
|
||||
# ------------------------------------------------------------------------------
|
||||
WEBPACK_LOADER["DEFAULT"]["LOADER_CLASS"] = "webpack_loader.loader.FakeWebpackLoader" # noqa: F405
|
||||
WEBPACK_LOADER["DEFAULT"]["LOADER_CLASS"] = "webpack_loader.loaders.FakeWebpackLoader" # noqa: F405
|
||||
|
||||
{%- endif %}
|
||||
# Your stuff...
|
||||
|
|
|
@ -4,24 +4,33 @@ from django.contrib import admin
|
|||
{%- if cookiecutter.use_async == 'y' %}
|
||||
from django.contrib.staticfiles.urls import staticfiles_urlpatterns
|
||||
{%- endif %}
|
||||
from django.urls import include, path
|
||||
from django.urls import include
|
||||
from django.urls import path
|
||||
from django.views import defaults as default_views
|
||||
from django.views.generic import TemplateView
|
||||
{%- if cookiecutter.use_drf == 'y' %}
|
||||
from drf_spectacular.views import SpectacularAPIView, SpectacularSwaggerView
|
||||
from drf_spectacular.views import SpectacularAPIView
|
||||
from drf_spectacular.views import SpectacularSwaggerView
|
||||
from rest_framework.authtoken.views import obtain_auth_token
|
||||
{%- endif %}
|
||||
|
||||
urlpatterns = [
|
||||
path("", TemplateView.as_view(template_name="pages/home.html"), name="home"),
|
||||
path("about/", TemplateView.as_view(template_name="pages/about.html"), name="about"),
|
||||
path(
|
||||
"about/",
|
||||
TemplateView.as_view(template_name="pages/about.html"),
|
||||
name="about",
|
||||
),
|
||||
# Django Admin, use {% raw %}{% url 'admin:index' %}{% endraw %}
|
||||
path(settings.ADMIN_URL, admin.site.urls),
|
||||
# User management
|
||||
path("users/", include("{{ cookiecutter.project_slug }}.users.urls", namespace="users")),
|
||||
path("accounts/", include("allauth.urls")),
|
||||
# Your stuff: custom urls includes go here
|
||||
] + static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT)
|
||||
# ...
|
||||
# Media files
|
||||
*static(settings.MEDIA_URL, document_root=settings.MEDIA_ROOT),
|
||||
]
|
||||
{%- if cookiecutter.use_async == 'y' %}
|
||||
if settings.DEBUG:
|
||||
# Static file serving when using Gunicorn + Uvicorn for local web socket development
|
||||
|
@ -33,7 +42,7 @@ urlpatterns += [
|
|||
# API base url
|
||||
path("api/", include("config.api_router")),
|
||||
# DRF auth token
|
||||
path("auth-token/", obtain_auth_token),
|
||||
path("api/auth-token/", obtain_auth_token, name="obtain_auth_token"),
|
||||
path("api/schema/", SpectacularAPIView.as_view(), name="api-schema"),
|
||||
path(
|
||||
"api/docs/",
|
||||
|
@ -67,4 +76,7 @@ if settings.DEBUG:
|
|||
if "debug_toolbar" in settings.INSTALLED_APPS:
|
||||
import debug_toolbar
|
||||
|
||||
urlpatterns = [path("__debug__/", include(debug_toolbar.urls))] + urlpatterns
|
||||
urlpatterns = [
|
||||
path("__debug__/", include(debug_toolbar.urls)),
|
||||
*urlpatterns,
|
||||
]
|
||||
|
|
Some files were not shown because too many files have changed in this diff Show More
Loading…
Reference in New Issue
Block a user