Mirror of https://github.com/cookiecutter/cookiecutter-django.git
Fix inconsistent line length and move configs to pyproject.toml (#4276)
* Fix inconsistent line length and move config to pyproject.toml (Fix #2720)
* Fix running tox with AUTOFIXABLE_STYLES
* Adjust some styles
* [pre-commit.ci] auto fixes from pre-commit.com hooks; for more information, see https://pre-commit.ci
* Adjust more styles
* Split isort and flake8 tests

Co-authored-by: pre-commit-ci[bot] <66853113+pre-commit-ci[bot]@users.noreply.github.com>
This commit is contained in:
parent 5fb1550c71
commit 17fa459dc3
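The point of the change is that Black and isort now read a shared line length of 119 from pyproject.toml, while flake8 and pycodestyle stay in setup.cfg because they do not support pyproject.toml. A small sketch of my own (not part of the commit, assuming Python 3.11+ for tomllib and a generated project root, where setup.cfg keeps the flake8 section) to confirm the limits stay in sync:

```python
# My own consistency check, not part of this commit.
# Assumes Python 3.11+ (tomllib) and that it runs from a generated project root.
import configparser
import tomllib

with open("pyproject.toml", "rb") as f:
    pyproject = tomllib.load(f)
black_len = pyproject["tool"]["black"]["line-length"]
isort_len = pyproject["tool"]["isort"]["line_length"]

cfg = configparser.ConfigParser()
cfg.read("setup.cfg")
flake8_len = cfg.getint("flake8", "max-line-length")

assert black_len == isort_len == flake8_len == 119, (black_len, isort_len, flake8_len)
print("line length consistent across black, isort and flake8:", black_len)
```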
@@ -239,8 +239,7 @@ texinfo_documents = [
         "Cookiecutter Django documentation",
         "Daniel Roy Greenfeld",
         "Cookiecutter Django",
-        "A Cookiecutter template for creating production-ready "
-        "Django projects quickly.",
+        "A Cookiecutter template for creating production-ready " "Django projects quickly.",
         "Miscellaneous",
     )
 ]
@@ -92,10 +92,7 @@ def remove_utility_files():
 def remove_heroku_files():
     file_names = ["Procfile", "runtime.txt", "requirements.txt"]
     for file_name in file_names:
-        if (
-            file_name == "requirements.txt"
-            and "{{ cookiecutter.ci_tool }}".lower() == "travis"
-        ):
+        if file_name == "requirements.txt" and "{{ cookiecutter.ci_tool }}".lower() == "travis":
             # don't remove the file if we are using travisci but not using heroku
             continue
         os.remove(file_name)
@@ -197,11 +194,7 @@ def handle_js_runner(choice, use_docker, use_async):
             "gulp-uglify-es",
         ]
         if not use_docker:
-            dev_django_cmd = (
-                "uvicorn config.asgi:application --reload"
-                if use_async
-                else "python manage.py runserver"
-            )
+            dev_django_cmd = "uvicorn config.asgi:application --reload" if use_async else "python manage.py runserver"
             scripts.update(
                 {
                     "dev": "concurrently npm:dev:*",
@@ -219,9 +212,7 @@ def remove_celery_files():
     file_names = [
         os.path.join("config", "celery_app.py"),
         os.path.join("{{ cookiecutter.project_slug }}", "users", "tasks.py"),
-        os.path.join(
-            "{{ cookiecutter.project_slug }}", "users", "tests", "test_tasks.py"
-        ),
+        os.path.join("{{ cookiecutter.project_slug }}", "users", "tests", "test_tasks.py"),
     ]
     for file_name in file_names:
         os.remove(file_name)
@@ -248,9 +239,7 @@ def remove_dotgithub_folder():
     shutil.rmtree(".github")


-def generate_random_string(
-    length, using_digits=False, using_ascii_letters=False, using_punctuation=False
-):
+def generate_random_string(length, using_digits=False, using_ascii_letters=False, using_punctuation=False):
     """
     Example:
         opting out for 50 symbol-long, [a-z][A-Z][0-9] string
@@ -344,9 +333,7 @@ def set_postgres_password(file_path, value=None):


 def set_celery_flower_user(file_path, value):
-    celery_flower_user = set_flag(
-        file_path, "!!!SET CELERY_FLOWER_USER!!!", value=value
-    )
+    celery_flower_user = set_flag(file_path, "!!!SET CELERY_FLOWER_USER!!!", value=value)
     return celery_flower_user


@@ -378,22 +365,14 @@ def set_flags_in_envs(postgres_user, celery_flower_user, debug=False):
     set_django_admin_url(production_django_envs_path)

     set_postgres_user(local_postgres_envs_path, value=postgres_user)
-    set_postgres_password(
-        local_postgres_envs_path, value=DEBUG_VALUE if debug else None
-    )
+    set_postgres_password(local_postgres_envs_path, value=DEBUG_VALUE if debug else None)
     set_postgres_user(production_postgres_envs_path, value=postgres_user)
-    set_postgres_password(
-        production_postgres_envs_path, value=DEBUG_VALUE if debug else None
-    )
+    set_postgres_password(production_postgres_envs_path, value=DEBUG_VALUE if debug else None)

     set_celery_flower_user(local_django_envs_path, value=celery_flower_user)
-    set_celery_flower_password(
-        local_django_envs_path, value=DEBUG_VALUE if debug else None
-    )
+    set_celery_flower_password(local_django_envs_path, value=DEBUG_VALUE if debug else None)
     set_celery_flower_user(production_django_envs_path, value=celery_flower_user)
-    set_celery_flower_password(
-        production_django_envs_path, value=DEBUG_VALUE if debug else None
-    )
+    set_celery_flower_password(production_django_envs_path, value=DEBUG_VALUE if debug else None)


 def set_flags_in_settings_files():
@@ -423,21 +402,9 @@ def remove_aws_dockerfile():
 def remove_drf_starter_files():
     os.remove(os.path.join("config", "api_router.py"))
     shutil.rmtree(os.path.join("{{cookiecutter.project_slug}}", "users", "api"))
-    os.remove(
-        os.path.join(
-            "{{cookiecutter.project_slug}}", "users", "tests", "test_drf_urls.py"
-        )
-    )
-    os.remove(
-        os.path.join(
-            "{{cookiecutter.project_slug}}", "users", "tests", "test_drf_views.py"
-        )
-    )
-    os.remove(
-        os.path.join(
-            "{{cookiecutter.project_slug}}", "users", "tests", "test_swagger.py"
-        )
-    )
+    os.remove(os.path.join("{{cookiecutter.project_slug}}", "users", "tests", "test_drf_urls.py"))
+    os.remove(os.path.join("{{cookiecutter.project_slug}}", "users", "tests", "test_drf_views.py"))
+    os.remove(os.path.join("{{cookiecutter.project_slug}}", "users", "tests", "test_swagger.py"))


 def remove_storages_module():
@@ -470,10 +437,7 @@ def main():
     else:
         remove_docker_files()

-    if (
-        "{{ cookiecutter.use_docker }}".lower() == "y"
-        and "{{ cookiecutter.cloud_provider}}" != "AWS"
-    ):
+    if "{{ cookiecutter.use_docker }}".lower() == "y" and "{{ cookiecutter.cloud_provider}}" != "AWS":
         remove_aws_dockerfile()

     if "{{ cookiecutter.use_heroku }}".lower() == "n":
@@ -481,10 +445,7 @@ def main():
     elif "{{ cookiecutter.frontend_pipeline }}" != "Django Compressor":
         remove_heroku_build_hooks()

-    if (
-        "{{ cookiecutter.use_docker }}".lower() == "n"
-        and "{{ cookiecutter.use_heroku }}".lower() == "n"
-    ):
+    if "{{ cookiecutter.use_docker }}".lower() == "n" and "{{ cookiecutter.use_heroku }}".lower() == "n":
         if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y":
             print(
                 INFO + ".env(s) are only utilized when Docker Compose and/or "
@@ -512,10 +473,7 @@ def main():
         use_async=("{{ cookiecutter.use_async }}".lower() == "y"),
     )

-    if (
-        "{{ cookiecutter.cloud_provider }}" == "None"
-        and "{{ cookiecutter.use_docker }}".lower() == "n"
-    ):
+    if "{{ cookiecutter.cloud_provider }}" == "None" and "{{ cookiecutter.use_docker }}".lower() == "n":
         print(
             WARNING + "You chose to not use any cloud providers nor Docker, "
             "media files won't be served in production." + TERMINATOR
@@ -27,17 +27,11 @@ SUCCESS = "\x1b[1;32m [SUCCESS]: "

 project_slug = "{{ cookiecutter.project_slug }}"
 if hasattr(project_slug, "isidentifier"):
-    assert (
-        project_slug.isidentifier()
-    ), "'{}' project slug is not a valid Python identifier.".format(project_slug)
+    assert project_slug.isidentifier(), "'{}' project slug is not a valid Python identifier.".format(project_slug)

-assert (
-    project_slug == project_slug.lower()
-), "'{}' project slug should be all lowercase".format(project_slug)
+assert project_slug == project_slug.lower(), "'{}' project slug should be all lowercase".format(project_slug)

-assert (
-    "\\" not in "{{ cookiecutter.author_name }}"
-), "Don't include backslashes in author name."
+assert "\\" not in "{{ cookiecutter.author_name }}", "Don't include backslashes in author name."

 if "{{ cookiecutter.use_docker }}".lower() == "n":
     python_major_version = sys.version_info[0]
@@ -59,32 +53,16 @@ if "{{ cookiecutter.use_docker }}".lower() == "n":
             print(
                 HINT
                 + "Please respond with {} or {}: ".format(
-                    ", ".join(
-                        ["'{}'".format(o) for o in yes_options if not o == ""]
-                    ),
-                    ", ".join(
-                        ["'{}'".format(o) for o in no_options if not o == ""]
-                    ),
+                    ", ".join(["'{}'".format(o) for o in yes_options if not o == ""]),
+                    ", ".join(["'{}'".format(o) for o in no_options if not o == ""]),
                 )
                 + TERMINATOR
             )

-if (
-    "{{ cookiecutter.use_whitenoise }}".lower() == "n"
-    and "{{ cookiecutter.cloud_provider }}" == "None"
-):
-    print(
-        "You should either use Whitenoise or select a "
-        "Cloud Provider to serve static files"
-    )
+if "{{ cookiecutter.use_whitenoise }}".lower() == "n" and "{{ cookiecutter.cloud_provider }}" == "None":
+    print("You should either use Whitenoise or select a " "Cloud Provider to serve static files")
     sys.exit(1)

-if (
-    "{{ cookiecutter.mail_service }}" == "Amazon SES"
-    and "{{ cookiecutter.cloud_provider }}" != "AWS"
-):
-    print(
-        "You should either use AWS or select a different "
-        "Mail Service for sending emails."
-    )
+if "{{ cookiecutter.mail_service }}" == "Amazon SES" and "{{ cookiecutter.cloud_provider }}" != "AWS":
+    print("You should either use AWS or select a different " "Mail Service for sending emails.")
     sys.exit(1)
pyproject.toml (new file, 29 additions)
@@ -0,0 +1,29 @@
+# ==== pytest ====
+[tool.pytest.ini_options]
+addopts = "-v --tb=short"
+norecursedirs = [
+    ".tox",
+    ".git",
+    "*/migrations/*",
+    "*/static/*",
+    "docs",
+    "venv",
+    "*/{{cookiecutter.project_slug}}/*",
+]
+
+
+# ==== black ====
+[tool.black]
+line-length = 119
+target-version = ['py311']
+
+
+# ==== isort ====
+[tool.isort]
+profile = "black"
+line_length = 119
+known_first_party = [
+    "tests",
+    "scripts",
+    "hooks",
+]
@@ -1,3 +0,0 @@
-[pytest]
-addopts = -v --tb=short
-norecursedirs = .tox .git */migrations/* */static/* docs venv */{{cookiecutter.project_slug}}/*
@@ -7,7 +7,6 @@ binaryornot==0.4.4
 black==23.3.0
 isort==5.12.0
 flake8==6.0.0
-flake8-isort==6.0.0
 pre-commit==3.2.2

 # Testing
@@ -141,9 +141,7 @@ class GitHubManager:
         self.requirements_files = ["base", "local", "production"]
         # Format:
         # requirement file name: {package name: (master_version, package_info)}
-        self.requirements: dict[str, dict[str, tuple[str, dict]]] = {
-            x: {} for x in self.requirements_files
-        }
+        self.requirements: dict[str, dict[str, tuple[str, dict]]] = {x: {} for x in self.requirements_files}

     def setup(self) -> None:
         self.load_requirements()
@@ -177,11 +175,7 @@ class GitHubManager:
             "is": "issue",
             "in": "title",
         }
-        issues = list(
-            self.github.search_issues(
-                "[Django Update]", "created", "desc", **qualifiers
-            )
-        )
+        issues = list(self.github.search_issues("[Django Update]", "created", "desc", **qualifiers))
         print(f"Found {len(issues)} issues matching search")
         for issue in issues:
             matches = re.match(r"\[Update Django] Django (\d+.\d+)$", issue.title)
@@ -194,9 +188,7 @@ class GitHubManager:
             else:
                 self.existing_issues[issue_version] = issue

-    def get_compatibility(
-        self, package_name: str, package_info: dict, needed_dj_version: DjVersion
-    ):
+    def get_compatibility(self, package_name: str, package_info: dict, needed_dj_version: DjVersion):
         """
         Verify compatibility via setup.py classifiers. If Django is not in the
         classifiers, then default compatibility is n/a and OK is ✅.
@@ -209,9 +201,7 @@ class GitHubManager:
         # updated packages, or known releases that will happen but haven't yet
         if issue := self.existing_issues.get(needed_dj_version):
             if index := issue.body.find(package_name):
-                name, _current, prev_compat, ok = (
-                    s.strip() for s in issue.body[index:].split("|", 4)[:4]
-                )
+                name, _current, prev_compat, ok = (s.strip() for s in issue.body[index:].split("|", 4)[:4])
                 if ok in ("✅", "❓", "🕒"):
                     return prev_compat, ok

@@ -248,9 +238,7 @@ class GitHubManager:
         ]

     def _get_md_home_page_url(self, package_info: dict):
-        urls = [
-            package_info["info"].get(url_key) for url_key in self.HOME_PAGE_URL_KEYS
-        ]
+        urls = [package_info["info"].get(url_key) for url_key in self.HOME_PAGE_URL_KEYS]
         try:
             return f"[{{}}]({next(item for item in urls if item)})"
         except StopIteration:
@@ -259,13 +247,9 @@ class GitHubManager:
     def generate_markdown(self, needed_dj_version: DjVersion):
         requirements = f"{needed_dj_version} requirements tables\n\n"
         for _file in self.requirements_files:
-            requirements += _TABLE_HEADER.format_map(
-                {"file": _file, "dj_version": needed_dj_version}
-            )
+            requirements += _TABLE_HEADER.format_map({"file": _file, "dj_version": needed_dj_version})
             for package_name, (version, info) in self.requirements[_file].items():
-                compat_version, icon = self.get_compatibility(
-                    package_name, info, needed_dj_version
-                )
+                compat_version, icon = self.get_compatibility(package_name, info, needed_dj_version)
                 requirements += (
                     f"| {self._get_md_home_page_url(info).format(package_name)} "
                     f"| {version.strip()} "
@@ -282,9 +266,7 @@ class GitHubManager:
             issue.edit(body=description)
         else:
             print(f"Creating new issue for Django {needed_dj_version}")
-            issue = self.repo.create_issue(
-                f"[Update Django] Django {needed_dj_version}", description
-            )
+            issue = self.repo.create_issue(f"[Update Django] Django {needed_dj_version}", description)
             issue.add_to_labels(f"django{needed_dj_version}")

     def generate(self):
@@ -297,9 +279,7 @@ class GitHubManager:

 def main(django_max_version=None) -> None:
     # Check if there are any djs
-    current_dj, latest_djs = get_all_latest_django_versions(
-        django_max_version=django_max_version
-    )
+    current_dj, latest_djs = get_all_latest_django_versions(django_max_version=django_max_version)
     if not latest_djs:
         sys.exit(0)
     manager = GitHubManager(current_dj, latest_djs)
@@ -309,9 +289,7 @@ def main(django_max_version=None) -> None:

 if __name__ == "__main__":
     if GITHUB_REPO is None:
-        raise RuntimeError(
-            "No github repo, please set the environment variable GITHUB_REPOSITORY"
-        )
+        raise RuntimeError("No github repo, please set the environment variable GITHUB_REPOSITORY")
     max_version = None
     last_arg = sys.argv[-1]
     if CURRENT_FILE.name not in last_arg:
@@ -154,11 +154,7 @@ def update_git_repo(paths: list[Path], release: str) -> None:

 if __name__ == "__main__":
     if GITHUB_REPO is None:
-        raise RuntimeError(
-            "No github repo, please set the environment variable GITHUB_REPOSITORY"
-        )
+        raise RuntimeError("No github repo, please set the environment variable GITHUB_REPOSITORY")
     if GIT_BRANCH is None:
-        raise RuntimeError(
-            "No git branch set, please set the GITHUB_REF_NAME environment variable"
-        )
+        raise RuntimeError("No git branch set, please set the GITHUB_REF_NAME environment variable")
     main()
@@ -44,15 +44,9 @@ def iter_recent_authors():
     git CLI to work with Github usernames.
     """
     repo = Github(login_or_token=GITHUB_TOKEN, per_page=5).get_repo(GITHUB_REPO)
-    recent_pulls = repo.get_pulls(
-        state="closed", sort="updated", direction="desc"
-    ).get_page(0)
+    recent_pulls = repo.get_pulls(state="closed", sort="updated", direction="desc").get_page(0)
     for pull in recent_pulls:
-        if (
-            pull.merged
-            and pull.user.type == "User"
-            and pull.user.login not in BOT_LOGINS
-        ):
+        if pull.merged and pull.user.type == "User" and pull.user.login not in BOT_LOGINS:
             yield pull.user


@@ -96,9 +90,7 @@ def write_md_file(contributors):
     core_contributors = [c for c in contributors if c.get("is_core", False)]
     other_contributors = (c for c in contributors if not c.get("is_core", False))
     other_contributors = sorted(other_contributors, key=lambda c: c["name"].lower())
-    content = template.render(
-        core_contributors=core_contributors, other_contributors=other_contributors
-    )
+    content = template.render(core_contributors=core_contributors, other_contributors=other_contributors)

     file_path = ROOT / "CONTRIBUTORS.md"
     file_path.write_text(content)
@@ -106,7 +98,5 @@ def write_md_file(contributors):

 if __name__ == "__main__":
     if GITHUB_REPO is None:
-        raise RuntimeError(
-            "No github repo, please set the environment variable GITHUB_REPOSITORY"
-        )
+        raise RuntimeError("No github repo, please set the environment variable GITHUB_REPOSITORY")
     main()
@@ -1,7 +0,0 @@
-[flake8]
-exclude = docs
-max-line-length = 88
-
-[isort]
-profile = black
-known_first_party = tests,scripts,hooks
setup.py (5 changes)
@@ -13,10 +13,7 @@ with open("README.rst") as readme_file:
 setup(
     name="cookiecutter-django",
     version=version,
-    description=(
-        "A Cookiecutter template for creating production-ready "
-        "Django projects quickly."
-    ),
+    description=("A Cookiecutter template for creating production-ready " "Django projects quickly."),
     long_description=long_description,
     author="Daniel Roy Greenfeld",
     author_email="pydanny@gmail.com",
@@ -23,7 +23,7 @@ elif sys.platform.startswith("darwin") and os.getenv("CI"):
 # Run auto-fixable styles checks - skipped on CI by default. These can be fixed
 # automatically by running pre-commit after generation however they are tedious
 # to fix in the template, so we don't insist too much in fixing them.
-AUTOFIXABLE_STYLES = os.getenv("AUTOFIXABLE_STYLES") == 1
+AUTOFIXABLE_STYLES = os.getenv("AUTOFIXABLE_STYLES") == "1"


 @pytest.fixture
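The one-character change above is the actual AUTOFIXABLE_STYLES bug the commit message mentions: os.getenv() returns a string (or None), so comparing it with the integer 1 can never be true and the auto-fixable style checks were always skipped. A standalone illustration of my own, not repository code:

```python
import os

os.environ["AUTOFIXABLE_STYLES"] = "1"  # what `AUTOFIXABLE_STYLES=1 tox` ends up exporting

print(os.getenv("AUTOFIXABLE_STYLES") == 1)    # False: environment values are strings, never ints
print(os.getenv("AUTOFIXABLE_STYLES") == "1")  # True: this is what the fixed test module checks
```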
@@ -144,11 +144,7 @@ def _fixture_id(ctx):

 def build_files_list(base_dir):
     """Build a list containing absolute paths to the generated files."""
-    return [
-        os.path.join(dirpath, file_path)
-        for dirpath, subdirs, files in os.walk(base_dir)
-        for file_path in files
-    ]
+    return [os.path.join(dirpath, file_path) for dirpath, subdirs, files in os.walk(base_dir) for file_path in files]


 def check_paths(paths):
@@ -208,6 +204,18 @@ def test_black_passes(cookies, context_override):
         pytest.fail(e.stdout.decode())


+@pytest.mark.skipif(not AUTOFIXABLE_STYLES, reason="isort is auto-fixable")
+@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
+def test_isort_passes(cookies, context_override):
+    """Check whether generated project passes isort style."""
+    result = cookies.bake(extra_context=context_override)
+
+    try:
+        sh.isort(_cwd=str(result.project_path))
+    except sh.ErrorReturnCode as e:
+        pytest.fail(e.stdout.decode())
+
+
 @pytest.mark.parametrize(
     ["use_docker", "expected_test_script"],
     [
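The new test_isort_passes test only runs when AUTOFIXABLE_STYLES is enabled, matching the skip behaviour of the other auto-fixable checks. A rough way to run it on its own outside tox, as a sketch rather than project tooling (it assumes the dev requirements are installed and reuses the ./tests path from tox.ini):

```python
# My own sketch for a local run of the new isort test; not part of the commit.
import os
import sys
import pytest

os.environ["AUTOFIXABLE_STYLES"] = "1"  # the skipif marker reads this when the module is collected
# "./tests" mirrors the default posargs in tox.ini; requires requirements.txt to be installed.
sys.exit(pytest.main(["./tests", "-k", "test_isort_passes"]))
```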
@@ -240,9 +248,7 @@ def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_script):
         ("y", "docker-compose -f local.yml run django pytest"),
     ],
 )
-def test_gitlab_invokes_precommit_and_pytest(
-    cookies, context, use_docker, expected_test_script
-):
+def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expected_test_script):
     context.update({"ci_tool": "Gitlab", "use_docker": use_docker})
     result = cookies.bake(extra_context=context)

@@ -269,9 +275,7 @@ def test_gitlab_invokes_precommit_and_pytest(
         ("y", "docker-compose -f local.yml run django pytest"),
     ],
 )
-def test_github_invokes_linter_and_pytest(
-    cookies, context, use_docker, expected_test_script
-):
+def test_github_invokes_linter_and_pytest(cookies, context, use_docker, expected_test_script):
     context.update({"ci_tool": "Github", "use_docker": use_docker})
     result = cookies.bake(extra_context=context)

@@ -22,7 +22,5 @@ def test_append_to_gitignore_file(working_directory):
     gitignore_file.write_text("node_modules/\n")
     append_to_gitignore_file(".envs/*")
     linesep = os.linesep.encode()
-    assert (
-        gitignore_file.read_bytes() == b"node_modules/" + linesep + b".envs/*" + linesep
-    )
+    assert gitignore_file.read_bytes() == b"node_modules/" + linesep + b".envs/*" + linesep
     assert gitignore_file.read_text() == "node_modules/\n.envs/*\n"
tox.ini (1 change)
@@ -4,6 +4,7 @@ envlist = py311,black-template

 [testenv]
 deps = -rrequirements.txt
+passenv = AUTOFIXABLE_STYLES
 commands = pytest {posargs:./tests}

 [testenv:black-template]
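The added passenv line is the tox half of the AUTOFIXABLE_STYLES fix: tox runs its commands in a filtered environment, so a variable that is not passed through never reaches pytest, no matter what the shell exports. A rough standalone illustration of that filtering (my own sketch, not how tox is implemented):

```python
import os
import subprocess
import sys

def run_in_filtered_env(cmd, passenv=()):
    """Roughly mimic a tox testenv: only whitelisted variables reach the child process."""
    child_env = {name: os.environ[name] for name in ("PATH", *passenv) if name in os.environ}
    return subprocess.run(cmd, env=child_env, capture_output=True, text=True)

os.environ["AUTOFIXABLE_STYLES"] = "1"
show_flag = [sys.executable, "-c", "import os; print(os.getenv('AUTOFIXABLE_STYLES'))"]

print(run_in_filtered_env(show_flag).stdout.strip())                                  # None: dropped
print(run_in_filtered_env(show_flag, passenv=("AUTOFIXABLE_STYLES",)).stdout.strip()) # 1: forwarded
```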
@@ -44,8 +44,6 @@ repos:
     rev: 6.0.0
     hooks:
       - id: flake8
-        args: ['--config=setup.cfg']
-        additional_dependencies: [flake8-isort]

 # sets up .pre-commit-ci.yaml to ensure pre-commit dependencies stay up to date
 ci:
@@ -1,14 +0,0 @@
-[MASTER]
-load-plugins=pylint_django{% if cookiecutter.use_celery == "y" %}, pylint_celery{% endif %}
-django-settings-module=config.settings.local
-[FORMAT]
-max-line-length=120
-
-[MESSAGES CONTROL]
-disable=missing-docstring,invalid-name
-
-[DESIGN]
-max-parents=13
-
-[TYPECHECK]
-generated-members=REQUEST,acl_users,aq_parent,"[a-zA-Z]+_set{1,2}",save,delete
@@ -130,9 +130,7 @@ PASSWORD_HASHERS = [
 ]
 # https://docs.djangoproject.com/en/dev/ref/settings/#auth-password-validators
 AUTH_PASSWORD_VALIDATORS = [
-    {
-        "NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"
-    },
+    {"NAME": "django.contrib.auth.password_validation.UserAttributeSimilarityValidator"},
     {"NAME": "django.contrib.auth.password_validation.MinimumLengthValidator"},
     {"NAME": "django.contrib.auth.password_validation.CommonPasswordValidator"},
     {"NAME": "django.contrib.auth.password_validation.NumericPasswordValidator"},
@@ -257,9 +255,8 @@ LOGGING = {
     "disable_existing_loggers": False,
     "formatters": {
         "verbose": {
-            "format": "%(levelname)s %(asctime)s %(module)s "
-            "%(process)d %(thread)d %(message)s"
-        }
+            "format": "%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s",
+        },
     },
     "handlers": {
         "console": {
@@ -37,9 +37,7 @@ EMAIL_HOST = "localhost"
 EMAIL_PORT = 1025
 {%- else -%}
 # https://docs.djangoproject.com/en/dev/ref/settings/#email-backend
-EMAIL_BACKEND = env(
-    "DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.console.EmailBackend"
-)
+EMAIL_BACKEND = env("DJANGO_EMAIL_BACKEND", default="django.core.mail.backends.console.EmailBackend")
 {%- endif %}

 {%- if cookiecutter.use_whitenoise == 'y' %}
@@ -56,15 +56,11 @@ CSRF_COOKIE_SECURE = True
 # TODO: set this to 60 seconds first and then to 518400 once you prove the former works
 SECURE_HSTS_SECONDS = 60
 # https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-include-subdomains
-SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool(
-    "DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True
-)
+SECURE_HSTS_INCLUDE_SUBDOMAINS = env.bool("DJANGO_SECURE_HSTS_INCLUDE_SUBDOMAINS", default=True)
 # https://docs.djangoproject.com/en/dev/ref/settings/#secure-hsts-preload
 SECURE_HSTS_PRELOAD = env.bool("DJANGO_SECURE_HSTS_PRELOAD", default=True)
 # https://docs.djangoproject.com/en/dev/ref/middleware/#x-content-type-options-nosniff
-SECURE_CONTENT_TYPE_NOSNIFF = env.bool(
-    "DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True
-)
+SECURE_CONTENT_TYPE_NOSNIFF = env.bool("DJANGO_SECURE_CONTENT_TYPE_NOSNIFF", default=True)

 {% if cookiecutter.cloud_provider != 'None' -%}
 # STORAGES
@@ -85,7 +81,7 @@ AWS_QUERYSTRING_AUTH = False
 _AWS_EXPIRY = 60 * 60 * 24 * 7
 # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
 AWS_S3_OBJECT_PARAMETERS = {
-    "CacheControl": f"max-age={_AWS_EXPIRY}, s-maxage={_AWS_EXPIRY}, must-revalidate"
+    "CacheControl": f"max-age={_AWS_EXPIRY}, s-maxage={_AWS_EXPIRY}, must-revalidate",
 }
 # https://django-storages.readthedocs.io/en/latest/backends/amazon-S3.html#settings
 AWS_S3_MAX_MEMORY_SIZE = env.int(
@@ -188,9 +184,7 @@ ANYMAIL = {
 EMAIL_BACKEND = "anymail.backends.mandrill.EmailBackend"
 ANYMAIL = {
     "MANDRILL_API_KEY": env("MANDRILL_API_KEY"),
-    "MANDRILL_API_URL": env(
-        "MANDRILL_API_URL", default="https://mandrillapp.com/api/1.0"
-    ),
+    "MANDRILL_API_URL": env("MANDRILL_API_URL", default="https://mandrillapp.com/api/1.0"),
 }
 {%- elif cookiecutter.mail_service == 'Postmark' %}
 # https://anymail.readthedocs.io/en/stable/esps/postmark/
@@ -211,18 +205,14 @@ ANYMAIL = {
 EMAIL_BACKEND = "anymail.backends.sendinblue.EmailBackend"
 ANYMAIL = {
     "SENDINBLUE_API_KEY": env("SENDINBLUE_API_KEY"),
-    "SENDINBLUE_API_URL": env(
-        "SENDINBLUE_API_URL", default="https://api.sendinblue.com/v3/"
-    ),
+    "SENDINBLUE_API_URL": env("SENDINBLUE_API_URL", default="https://api.sendinblue.com/v3/"),
 }
 {%- elif cookiecutter.mail_service == 'SparkPost' %}
 # https://anymail.readthedocs.io/en/stable/esps/sparkpost/
 EMAIL_BACKEND = "anymail.backends.sparkpost.EmailBackend"
 ANYMAIL = {
     "SPARKPOST_API_KEY": env("SPARKPOST_API_KEY"),
-    "SPARKPOST_API_URL": env(
-        "SPARKPOST_API_URL", default="https://api.sparkpost.com/api/v1"
-    ),
+    "SPARKPOST_API_URL": env("SPARKPOST_API_URL", default="https://api.sparkpost.com/api/v1"),
 }
 {%- elif cookiecutter.mail_service == 'Other SMTP' %}
 # https://anymail.readthedocs.io/en/stable/esps
@@ -278,9 +268,8 @@ LOGGING = {
     "filters": {"require_debug_false": {"()": "django.utils.log.RequireDebugFalse"}},
     "formatters": {
         "verbose": {
-            "format": "%(levelname)s %(asctime)s %(module)s "
-            "%(process)d %(thread)d %(message)s"
-        }
+            "format": "%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s",
+        },
     },
     "handlers": {
         "mail_admins": {
@@ -314,9 +303,8 @@ LOGGING = {
     "disable_existing_loggers": True,
     "formatters": {
         "verbose": {
-            "format": "%(levelname)s %(asctime)s %(module)s "
-            "%(process)d %(thread)d %(message)s"
-        }
+            "format": "%(levelname)s %(asctime)s %(module)s %(process)d %(thread)d %(message)s",
+        },
     },
     "handlers": {
         "console": {
@@ -376,7 +364,7 @@ sentry_sdk.init(
 # -------------------------------------------------------------------------------
 # Tools that generate code samples can use SERVERS to point to the correct domain
 SPECTACULAR_SETTINGS["SERVERS"] = [  # noqa: F405
-    {"url": "https://{{ cookiecutter.domain_name }}", "description": "Production server"}
+    {"url": "https://{{ cookiecutter.domain_name }}", "description": "Production server"},
 ]

 {%- endif %}
@@ -32,9 +32,7 @@ TEMPLATES[0]["OPTIONS"]["debug"] = True  # type: ignore # noqa: F405
 {%- if cookiecutter.frontend_pipeline == 'Webpack' %}
 # django-webpack-loader
 # ------------------------------------------------------------------------------
-WEBPACK_LOADER["DEFAULT"][  # noqa: F405
-    "LOADER_CLASS"
-] = "webpack_loader.loader.FakeWebpackLoader"
+WEBPACK_LOADER["DEFAULT"]["LOADER_CLASS"] = "webpack_loader.loader.FakeWebpackLoader"  # noqa: F405

 {%- endif %}
 # Your stuff...
@@ -14,9 +14,7 @@ from rest_framework.authtoken.views import obtain_auth_token

 urlpatterns = [
     path("", TemplateView.as_view(template_name="pages/home.html"), name="home"),
-    path(
-        "about/", TemplateView.as_view(template_name="pages/about.html"), name="about"
-    ),
+    path("about/", TemplateView.as_view(template_name="pages/about.html"), name="about"),
     # Django Admin, use {% raw %}{% url 'admin:index' %}{% endraw %}
     path(settings.ADMIN_URL, admin.site.urls),
     # User management
{{cookiecutter.project_slug}}/pyproject.toml (new file, 92 additions)
@@ -0,0 +1,92 @@
+# ==== pytest ====
+[tool.pytest.ini_options]
+minversion = "6.0"
+addopts = "--ds=config.settings.test --reuse-db"
+python_files = [
+    "tests.py",
+    "test_*.py",
+]
+{%- if cookiecutter.frontend_pipeline == 'Gulp' %}
+norecursedirs = ["node_modules"]
+{%- endif %}
+
+# ==== Coverage ====
+[tool.coverage.run]
+include = ["{{cookiecutter.project_slug}}/**"]
+omit = ["*/migrations/*", "*/tests/*"]
+plugins = ["django_coverage_plugin"]
+
+
+# ==== black ====
+[tool.black]
+line-length = 119
+target-version = ['py311']
+
+
+# ==== isort ====
+[tool.isort]
+profile = "black"
+line_length = 119
+known_first_party = [
+    "{{cookiecutter.project_slug}}",
+    "config",
+]
+skip = ["venv/"]
+skip_glob = ["**/migrations/*.py"]
+
+
+# ==== mypy ====
+[tool.mypy]
+python_version = "3.11"
+check_untyped_defs = true
+ignore_missing_imports = true
+warn_unused_ignores = true
+warn_redundant_casts = true
+warn_unused_configs = true
+plugins = [
+    "mypy_django_plugin.main",
+{%- if cookiecutter.use_drf == "y" %}
+    "mypy_drf_plugin.main",
+{%- endif %}
+]
+
+[[tool.mypy.overrides]]
+# Django migrations should not produce any errors:
+module = "*.migrations.*"
+ignore_errors = true
+
+[tool.django-stubs]
+django_settings_module = "config.settings.test"
+
+
+# ==== PyLint ====
+[tool.pylint.MASTER]
+load-plugins = [
+    "pylint_django",
+{%- if cookiecutter.use_celery == "y" %}
+    "pylint_celery",
+{%- endif %}
+]
+django-settings-module = "config.settings.local"
+
+[tool.pylint.FORMAT]
+max-line-length = 119
+
+[tool.pylint."MESSAGES CONTROL"]
+disable = [
+    "missing-docstring",
+    "invalid-name",
+]
+
+[tool.pylint.DESIGN]
+max-parents = 13
+
+[tool.pylint.TYPECHECK]
+generated-members = [
+    "REQUEST",
+    "acl_users",
+    "aq_parent",
+    "[a-zA-Z]+_set{1,2}",
+    "save",
+    "delete",
+]
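Because this pyproject.toml is itself a cookiecutter template, it only becomes valid TOML after the Jinja tags are rendered. A hedged sketch of how one could sanity-check that; the file path and context keys come from the diff, everything else is an assumption (my own code, not project tooling):

```python
# Hypothetical check that the templated pyproject.toml renders to valid TOML.
# Assumes Python 3.11+ (tomllib), jinja2 installed, and running from the repository root;
# cookiecutter itself renders with a richer context than this.
import tomllib
from jinja2 import Template

context = {"project_slug": "my_project", "use_drf": "y", "use_celery": "n", "frontend_pipeline": "None"}
with open("{{cookiecutter.project_slug}}/pyproject.toml") as f:
    rendered = Template(f.read()).render(cookiecutter=context)

config = tomllib.loads(rendered)
print(config["tool"]["black"]["line-length"], config["tool"]["isort"]["line_length"])  # 119 119
```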
@@ -1,6 +0,0 @@
-[pytest]
-addopts = --ds=config.settings.test --reuse-db
-python_files = tests.py test_*.py
-{%- if cookiecutter.frontend_pipeline == 'Gulp' %}
-norecursedirs = node_modules
-{%- endif %}
@@ -1,40 +1,10 @@
+# flake8 and pycodestyle don't support pyproject.toml
+# https://github.com/PyCQA/flake8/issues/234
+# https://github.com/PyCQA/pycodestyle/issues/813
 [flake8]
-max-line-length = 120
+max-line-length = 119
 exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules,venv,.venv

 [pycodestyle]
-max-line-length = 120
+max-line-length = 119
 exclude = .tox,.git,*/migrations/*,*/static/CACHE/*,docs,node_modules,venv,.venv
-
-[isort]
-line_length = 88
-known_first_party = {{cookiecutter.project_slug}},config
-multi_line_output = 3
-default_section = THIRDPARTY
-skip = venv/
-skip_glob = **/migrations/*.py
-include_trailing_comma = true
-force_grid_wrap = 0
-use_parentheses = true
-
-[mypy]
-python_version = 3.11
-check_untyped_defs = True
-ignore_missing_imports = True
-warn_unused_ignores = True
-warn_redundant_casts = True
-warn_unused_configs = True
-plugins = mypy_django_plugin.main{% if cookiecutter.use_drf == "y" %}, mypy_drf_plugin.main{% endif %}
-
-[mypy.plugins.django-stubs]
-django_settings_module = config.settings.test
-
-[mypy-*.migrations.*]
-# Django migrations should not produce any errors:
-ignore_errors = True
-
-[coverage:run]
-include = {{cookiecutter.project_slug}}/**
-omit = */migrations/*, */tests/*
-plugins =
-    django_coverage_plugin
@@ -1,5 +1,2 @@
 __version__ = "{{ cookiecutter.version }}"
-__version_info__ = tuple(
-    int(num) if num.isdigit() else num
-    for num in __version__.replace("-", ".", 1).split(".")
-)
+__version_info__ = tuple(int(num) if num.isdigit() else num for num in __version__.replace("-", ".", 1).split("."))
@@ -11,12 +11,12 @@ class UserSerializer(serializers.ModelSerializer):
         fields = ["name", "url"]

         extra_kwargs = {
-            "url": {"view_name": "api:user-detail", "lookup_field": "pk"}
+            "url": {"view_name": "api:user-detail", "lookup_field": "pk"},
         }
 {%- else %}
         fields = ["username", "name", "url"]

         extra_kwargs = {
-            "url": {"view_name": "api:user-detail", "lookup_field": "username"}
+            "url": {"view_name": "api:user-detail", "lookup_field": "username"},
         }
 {%- endif %}
@@ -5,16 +5,10 @@ from {{ cookiecutter.project_slug }}.users.models import User

 def test_user_detail(user: User):
 {%- if cookiecutter.username_type == "email" %}
-    assert (
-        reverse("api:user-detail", kwargs={"pk": user.pk})
-        == f"/api/users/{user.pk}/"
-    )
+    assert reverse("api:user-detail", kwargs={"pk": user.pk}) == f"/api/users/{user.pk}/"
     assert resolve(f"/api/users/{user.pk}/").view_name == "api:user-detail"
 {%- else %}
-    assert (
-        reverse("api:user-detail", kwargs={"username": user.username})
-        == f"/api/users/{user.username}/"
-    )
+    assert reverse("api:user-detail", kwargs={"username": user.username}) == f"/api/users/{user.username}/"
     assert resolve(f"/api/users/{user.username}/").view_name == "api:user-detail"
 {%- endif %}

@@ -8,10 +8,7 @@ def test_detail(user: User):
     assert reverse("users:detail", kwargs={"pk": user.pk}) == f"/users/{user.pk}/"
     assert resolve(f"/users/{user.pk}/").view_name == "users:detail"
 {%- else %}
-    assert (
-        reverse("users:detail", kwargs={"username": user.username})
-        == f"/users/{user.username}/"
-    )
+    assert reverse("users:detail", kwargs={"username": user.username}) == f"/users/{user.username}/"
     assert resolve(f"/users/{user.username}/").view_name == "users:detail"
 {%- endif %}

@@ -28,9 +28,7 @@ class UserUpdateView(LoginRequiredMixin, SuccessMessageMixin, UpdateView):
     success_message = _("Information successfully updated")

     def get_success_url(self):
-        assert (
-            self.request.user.is_authenticated
-        )  # for mypy to know that the user is authenticated
+        assert self.request.user.is_authenticated  # for mypy to know that the user is authenticated
         return self.request.user.get_absolute_url()

     def get_object(self):