Mirror of https://github.com/cookiecutter/cookiecutter-django.git (synced 2024-11-10 19:57:09 +03:00)
Add pyupgrade to pre-commit config (#3702)
* Add pyupgrade to pre-commit config
* Exclude hooks folder from pyupgrade
* Autofix: run pyupgrade
* Autofix: run pyupgrade on template files
parent 337bbd2820
commit 96b9b95af7
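For context before the diff: pyupgrade's --py39-plus mode rewrites older Python idioms into their modern equivalents. Below is a minimal standalone sketch (not code from this repository) of the two rewrites that recur in the hunks that follow: str.format() replaced by an f-string, and the redundant "r" mode argument dropped from open(). The temporary file is purely illustrative.

import os
import tempfile

# str.format() -> f-string: both build the same string.
year = 2022
assert "2013-{}, Daniel Roy Greenfeld".format(year) == f"2013-{year}, Daniel Roy Greenfeld"

# open(path, "r") -> open(path): "r" (text read) is already the default mode.
fd, path = tempfile.mkstemp(text=True)
with os.fdopen(fd, "w") as fh:
    fh.write("hello")
with open(path, "r") as explicit, open(path) as implicit:
    assert explicit.read() == implicit.read() == "hello"
os.unlink(path)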
@@ -8,6 +8,13 @@ repos:
       - id: trailing-whitespace
       - id: check-yaml
 
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v2.32.1
+    hooks:
+      - id: pyupgrade
+        args: [--py39-plus]
+        exclude: hooks/
+
   - repo: https://github.com/psf/black
     rev: 22.3.0
     hooks:
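Unlike the template-level config further down, this root config also adds exclude: hooks/ so the cookiecutter hooks folder is skipped. pre-commit interprets exclude as a Python regular expression searched against each candidate file path; a rough sketch of that filtering, with illustrative file names rather than a listing of the repo:

import re

# Paths matching the pattern are skipped by the hook; everything else is checked.
exclude = re.compile(r"hooks/")
assert exclude.search("hooks/post_gen_project.py") is not None  # skipped by pyupgrade
assert exclude.search("docs/conf.py") is None                   # still checked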
@@ -39,7 +39,7 @@ master_doc = "index"
 
 # General information about the project.
 project = "Cookiecutter Django"
-copyright = "2013-{}, Daniel Roy Greenfeld".format(now.year)
+copyright = f"2013-{now.year}, Daniel Roy Greenfeld"
 
 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -138,7 +138,7 @@ def check_paths(paths):
         if is_binary(path):
             continue
 
-        for line in open(path, "r"):
+        for line in open(path):
             match = RE_OBJ.search(line)
             assert match is None, f"cookiecutter variable not replaced in {path}"
 
@@ -203,7 +203,7 @@ def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_script):
     assert result.project_path.name == context["project_slug"]
     assert result.project_path.is_dir()
 
-    with open(f"{result.project_path}/.travis.yml", "r") as travis_yml:
+    with open(f"{result.project_path}/.travis.yml") as travis_yml:
         try:
             yml = yaml.safe_load(travis_yml)["jobs"]["include"]
             assert yml[0]["script"] == ["flake8"]
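This test and the two that follow parse the generated CI config with PyYAML before asserting on it. A self-contained sketch of the same pattern, using an inline YAML snippet instead of a baked project (assumes PyYAML is installed):

import yaml

# Parse a .travis.yml-style document and assert on the first job's script,
# mirroring the structure the test above expects.
travis_yml = """
jobs:
  include:
    - script: ["flake8"]
"""
jobs = yaml.safe_load(travis_yml)["jobs"]["include"]
assert jobs[0]["script"] == ["flake8"]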
@@ -230,7 +230,7 @@ def test_gitlab_invokes_flake8_and_pytest(
     assert result.project_path.name == context["project_slug"]
     assert result.project_path.is_dir()
 
-    with open(f"{result.project_path}/.gitlab-ci.yml", "r") as gitlab_yml:
+    with open(f"{result.project_path}/.gitlab-ci.yml") as gitlab_yml:
         try:
             gitlab_config = yaml.safe_load(gitlab_yml)
             assert gitlab_config["flake8"]["script"] == ["flake8"]
@@ -257,7 +257,7 @@ def test_github_invokes_linter_and_pytest(
     assert result.project_path.name == context["project_slug"]
     assert result.project_path.is_dir()
 
-    with open(f"{result.project_path}/.github/workflows/ci.yml", "r") as github_yml:
+    with open(f"{result.project_path}/.github/workflows/ci.yml") as github_yml:
         try:
             github_config = yaml.safe_load(github_yml)
             linter_present = False
@@ -308,6 +308,6 @@ def test_pycharm_docs_removed(cookies, context, use_pycharm, pycharm_docs_exist):
     context.update({"use_pycharm": use_pycharm})
     result = cookies.bake(extra_context=context)
 
-    with open(f"{result.project_path}/docs/index.rst", "r") as f:
+    with open(f"{result.project_path}/docs/index.rst") as f:
         has_pycharm_docs = "pycharm/configuration" in f.read()
         assert has_pycharm_docs is pycharm_docs_exist
@@ -9,6 +9,12 @@ repos:
       - id: end-of-file-fixer
       - id: check-yaml
 
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v2.32.1
+    hooks:
+      - id: pyupgrade
+        args: [--py39-plus]
+
   - repo: https://github.com/psf/black
     rev: 22.3.0
     hooks:
@@ -18,7 +18,7 @@ def merge(
 ) -> None:
     with open(output_file_path, "w") as output_file:
         for merged_file_path in merged_file_paths:
-            with open(merged_file_path, "r") as merged_file:
+            with open(merged_file_path) as merged_file:
                 merged_file_content = merged_file.read()
                 output_file.write(merged_file_content)
                 if append_linesep:
@@ -41,7 +41,7 @@ def test_merge(tmpdir_factory, merged_file_count: int, append_linesep: bool):
     for i in range(merged_file_count):
         merged_file_ord = i + 1
 
-        merged_filename = ".service{}".format(merged_file_ord)
+        merged_filename = f".service{merged_file_ord}"
         merged_file_path = tmp_dir_path / merged_filename
 
         merged_file_content = merged_filename * merged_file_ord
@@ -57,7 +57,7 @@ def test_merge(tmpdir_factory, merged_file_count: int, append_linesep: bool):
 
     merge(output_file_path, merged_file_paths, append_linesep)
 
-    with open(output_file_path, "r") as output_file:
+    with open(output_file_path) as output_file:
         actual_output_file_content = output_file.read()
 
     assert actual_output_file_content == expected_output_file_content
@@ -1,7 +1,5 @@
 __version__ = "{{ cookiecutter.version }}"
 __version_info__ = tuple(
-    [
-        int(num) if num.isdigit() else num
-        for num in __version__.replace("-", ".", 1).split(".")
-    ]
+    int(num) if num.isdigit() else num
+    for num in __version__.replace("-", ".", 1).split(".")
 )
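In this last hunk pyupgrade drops a list comprehension that existed only to feed tuple(); passing the generator expression directly yields the same tuple. A standalone sketch with a hard-coded version string (the real file interpolates {{ cookiecutter.version }}):

# Both forms yield the same version-info tuple; the inner list is unnecessary.
version = "1.2.3-rc1"
with_list = tuple([int(num) if num.isdigit() else num for num in version.replace("-", ".", 1).split(".")])
as_genexp = tuple(int(num) if num.isdigit() else num for num in version.replace("-", ".", 1).split("."))
assert with_list == as_genexp == (1, 2, 3, "rc1")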