Mirror of https://github.com/cookiecutter/cookiecutter-django.git (synced 2025-07-16 11:02:22 +03:00)

Run Ruff with --unsafe-fixes

parent 3ccfc21411
commit 9297598613
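
Ruff's --unsafe-fixes mode applies autofixes that Ruff cannot guarantee preserve the code's behaviour (deleting print calls, for example), which is why the hunks below read as mechanical edits. A minimal invocation that would produce this kind of diff, assuming the Ruff configuration already present in the repository, is:

    ruff check . --fix --unsafe-fixes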

@@ -284,10 +284,6 @@ def set_flag(file_path, flag, value=None, formatted=None, *args, **kwargs):
     if value is None:
         random_string = generate_random_string(*args, **kwargs)
         if random_string is None:
-            print(
-                "We couldn't find a secure pseudo-random number generator on your "
-                f"system. Please, make sure to manually {flag} later.",
-            )
             random_string = flag
         if formatted is not None:
             random_string = formatted.format(random_string)
@@ -303,18 +299,17 @@ def set_flag(file_path, flag, value=None, formatted=None, *args, **kwargs):


 def set_django_secret_key(file_path):
-    django_secret_key = set_flag(
+    return set_flag(
         file_path,
         "!!!SET DJANGO_SECRET_KEY!!!",
         length=64,
         using_digits=True,
         using_ascii_letters=True,
     )
-    return django_secret_key


 def set_django_admin_url(file_path):
-    django_admin_url = set_flag(
+    return set_flag(
         file_path,
         "!!!SET DJANGO_ADMIN_URL!!!",
         formatted="{}/",
@@ -322,7 +317,6 @@ def set_django_admin_url(file_path):
         using_digits=True,
         using_ascii_letters=True,
     )
-    return django_admin_url


 def generate_random_user():
@@ -334,12 +328,11 @@ def generate_postgres_user(debug=False):


 def set_postgres_user(file_path, value):
-    postgres_user = set_flag(file_path, "!!!SET POSTGRES_USER!!!", value=value)
-    return postgres_user
+    return set_flag(file_path, "!!!SET POSTGRES_USER!!!", value=value)


 def set_postgres_password(file_path, value=None):
-    postgres_password = set_flag(
+    return set_flag(
         file_path,
         "!!!SET POSTGRES_PASSWORD!!!",
         value=value,
@@ -347,16 +340,14 @@ def set_postgres_password(file_path, value=None):
         using_digits=True,
         using_ascii_letters=True,
     )
-    return postgres_password


 def set_celery_flower_user(file_path, value):
-    celery_flower_user = set_flag(file_path, "!!!SET CELERY_FLOWER_USER!!!", value=value)
-    return celery_flower_user
+    return set_flag(file_path, "!!!SET CELERY_FLOWER_USER!!!", value=value)


 def set_celery_flower_password(file_path, value=None):
-    celery_flower_password = set_flag(
+    return set_flag(
         file_path,
         "!!!SET CELERY_FLOWER_PASSWORD!!!",
         value=value,
@@ -364,7 +355,6 @@ def set_celery_flower_password(file_path, value=None):
         using_digits=True,
         using_ascii_letters=True,
     )
-    return celery_flower_password


 def append_to_gitignore_file(ignored_line):
@@ -461,11 +451,7 @@ def main():

     if "{{ cookiecutter.use_docker }}".lower() == "n" and "{{ cookiecutter.use_heroku }}".lower() == "n":
         if "{{ cookiecutter.keep_local_envs_in_vcs }}".lower() == "y":
-            print(
-                INFO + ".env(s) are only utilized when Docker Compose and/or "
-                "Heroku support is enabled so keeping them does not make sense "
-                "given your current setup." + TERMINATOR,
-            )
+            pass
         remove_envs_and_associated_files()
     else:
         append_to_gitignore_file(".env")
@@ -489,10 +475,7 @@ def main():
     )

     if "{{ cookiecutter.cloud_provider }}" == "None" and "{{ cookiecutter.use_docker }}".lower() == "n":
-        print(
-            WARNING + "You chose to not use any cloud providers nor Docker, "
-            "media files won't be served in production." + TERMINATOR,
-        )
+        pass

     if "{{ cookiecutter.use_celery }}".lower() == "n":
         remove_celery_files()
@@ -517,7 +500,6 @@ def main():
     if "{{ cookiecutter.use_async }}".lower() == "n":
         remove_async_files()

-    print(SUCCESS + "Project initialized, keep up the good work!" + TERMINATOR)


 if __name__ == "__main__":
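
The hunks above show two recurring fix patterns: a temporary variable that is returned immediately gets inlined into the return statement, and logging-style print calls are deleted outright, with pass substituted where the print was the only statement in its block. A standalone sketch of both patterns (the function and variable names here are illustrative, not taken from the hook):

    # Hedged illustration of the fix patterns above; names are made up for this example.


    def shout_before(value):
        # Before: an intermediate variable that is only used by the return statement.
        result = value.upper()
        return result


    def shout_after(value):
        # After: the assignment is inlined into the return statement.
        return value.upper()


    def maybe_warn_after(condition):
        if condition:
            # After: the print that used to be the only statement here was removed
            # by the unsafe fix, so `pass` keeps the block syntactically valid.
            pass


    if __name__ == "__main__":
        assert shout_before("x") == shout_after("x") == "X"
        maybe_warn_after(condition=True)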

@@ -23,9 +23,7 @@ assert project_slug == project_slug.lower(), f"'{project_slug}' project slug sho
 assert "\\" not in "{{ cookiecutter.author_name }}", "Don't include backslashes in author name."

 if "{{ cookiecutter.use_whitenoise }}".lower() == "n" and "{{ cookiecutter.cloud_provider }}" == "None":
-    print("You should either use Whitenoise or select a Cloud Provider to serve static files")
     sys.exit(1)

 if "{{ cookiecutter.mail_service }}" == "Amazon SES" and "{{ cookiecutter.cloud_provider }}" != "AWS":
-    print("You should either use AWS or select a different Mail Service for sending emails.")
     sys.exit(1)

@@ -12,7 +12,6 @@ from __future__ import annotations
 import os
 import re
 import sys
-from collections.abc import Iterable
 from pathlib import Path
 from typing import TYPE_CHECKING
 from typing import Any
@@ -22,6 +21,8 @@ import requests
 from github import Github

 if TYPE_CHECKING:
+    from collections.abc import Iterable
+
     from github.Issue import Issue

 CURRENT_FILE = Path(__file__)
@@ -62,7 +63,6 @@ def get_package_info(package: str) -> dict:
     # "django" converts to "Django" on redirect
     r = requests.get(f"https://pypi.org/pypi/{package}/json", allow_redirects=True)
     if not r.ok:
-        print(f"Couldn't find package: {package}")
         sys.exit(1)
     return r.json()

@@ -85,7 +85,7 @@ def get_name_and_version(requirements_line: str) -> tuple[str, ...]:


 def get_all_latest_django_versions(
-    django_max_version: tuple[DjVersion] = None,
+    django_max_version: tuple[DjVersion] | None = None,
 ) -> tuple[DjVersion, list[DjVersion]]:
     """
     Grabs all Django versions that are worthy of a GitHub issue.
@@ -95,14 +95,12 @@ def get_all_latest_django_versions(
     if django_max_version:
         _django_max_version = django_max_version

-    print("Fetching all Django versions from PyPI")
     base_txt = REQUIREMENTS_DIR / "base.txt"
     with base_txt.open() as f:
         for line in f.readlines():
             if "django==" in line.lower():
                 break
         else:
-            print(f"django not found in {base_txt}")  # Huh...?
             sys.exit(1)

     # Begin parsing and verification
@@ -151,7 +149,6 @@ class GitHubManager:
         self.load_existing_issues()

     def load_requirements(self):
-        print("Reading requirements")
         for requirements_file in self.requirements_files:
             with (REQUIREMENTS_DIR / f"{requirements_file}.txt").open() as f:
                 for line in f.readlines():
@@ -170,7 +167,6 @@ class GitHubManager:

     def load_existing_issues(self):
         """Closes the issue if the base Django version is greater than needed"""
-        print("Load existing issues from GitHub")
         qualifiers = {
             "repo": GITHUB_REPO,
             "author": "app/github-actions",
@@ -179,7 +175,6 @@ class GitHubManager:
             "in": "title",
         }
         issues = list(self.github.search_issues("[Django Update]", "created", "desc", **qualifiers))
-        print(f"Found {len(issues)} issues matching search")
         for issue in issues:
             matches = re.match(r"\[Update Django] Django (\d+.\d+)$", issue.title)
             if not matches:
@@ -263,23 +258,18 @@ class GitHubManager:

     def create_or_edit_issue(self, needed_dj_version: DjVersion, description: str):
         if issue := self.existing_issues.get(needed_dj_version):
-            print(f"Editing issue #{issue.number} for Django {needed_dj_version}")
             issue.edit(body=description)
         else:
-            print(f"Creating new issue for Django {needed_dj_version}")
             issue = self.repo.create_issue(f"[Update Django] Django {needed_dj_version}", description)
             issue.add_to_labels(f"django{needed_dj_version}")

     @staticmethod
     def close_issue(issue: Issue):
         issue.edit(state="closed")
-        print(f"Closed issue {issue.title} (ID: [{issue.id}]({issue.url}))")

     def generate(self):
         for version in self.needed_dj_versions:
-            print(f"Handling GitHub issue for Django {version}")
             md_content = self.generate_markdown(version)
-            print(f"Generated markdown:\n\n{md_content}")
             self.create_or_edit_issue(version, md_content)


@@ -292,7 +282,6 @@ def main(django_max_version=None) -> None:
     manager.setup()

     if not latest_djs:
-        print("No new Django versions to update. Exiting...")
         sys.exit(0)

     manager.generate()
@@ -300,7 +289,8 @@ def main(django_max_version=None) -> None:

 if __name__ == "__main__":
     if GITHUB_REPO is None:
-        raise RuntimeError("No github repo, please set the environment variable GITHUB_REPOSITORY")
+        msg = "No github repo, please set the environment variable GITHUB_REPOSITORY"
+        raise RuntimeError(msg)
     max_version = None
     last_arg = sys.argv[-1]
     if CURRENT_FILE.name not in last_arg:
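
Beyond the print removals, the hunks above apply three further fix patterns: an import used only in annotations moves under the if TYPE_CHECKING: block, a parameter that defaults to None gets an explicit "| None" in its annotation, and exception messages are bound to a variable before being raised. A self-contained sketch of the same patterns (the function names below are illustrative, not from the script; the error message is the one quoted in the diff):

    from __future__ import annotations

    from typing import TYPE_CHECKING

    if TYPE_CHECKING:
        # Typing-only import: needed for annotations only, so it is skipped at runtime.
        from collections.abc import Iterable


    def first_item(values: Iterable[int] | None = None) -> int | None:
        # Explicit optional: "Iterable[int] = None" becomes "Iterable[int] | None = None".
        if values is None:
            return None
        return next(iter(values), None)


    def require_repo(repo: str | None) -> str:
        if repo is None:
            # The exception message is assigned to a variable first, then raised.
            msg = "No github repo, please set the environment variable GITHUB_REPOSITORY"
            raise RuntimeError(msg)
        return repo


    if __name__ == "__main__":
        assert first_item([1, 2, 3]) == 1
        assert require_repo("cookiecutter/cookiecutter-django") == "cookiecutter/cookiecutter-django"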

@@ -26,31 +26,25 @@ def main() -> None:
     merged_date = dt.date.today() - dt.timedelta(days=1)
     repo = Github(login_or_token=GITHUB_TOKEN).get_repo(GITHUB_REPO)
     merged_pulls = list(iter_pulls(repo, merged_date))
-    print(f"Merged pull requests: {merged_pulls}")
     if not merged_pulls:
-        print("Nothing was merged, existing.")
         return

     # Group pull requests by type of change
     grouped_pulls = group_pulls_by_change_type(merged_pulls)
     if not any(grouped_pulls.values()):
-        print("Pull requests merged aren't worth a changelog mention.")
         return

     # Generate portion of markdown
     release_changes_summary = generate_md(grouped_pulls)
-    print(f"Summary of changes: {release_changes_summary}")

     # Update CHANGELOG.md file
     release = f"{merged_date:%Y.%m.%d}"
     changelog_path = ROOT / "CHANGELOG.md"
     write_changelog(changelog_path, release, release_changes_summary)
-    print(f"Wrote {changelog_path}")

     # Update version
     setup_py_path = ROOT / "pyproject.toml"
     update_version(setup_py_path, release)
-    print(f"Updated version in {setup_py_path}")

     # Run uv lock
     uv_lock_path = ROOT / "uv.lock"
@@ -60,12 +54,11 @@ def main() -> None:
     update_git_repo([changelog_path, setup_py_path, uv_lock_path], release)

     # Create GitHub release
-    github_release = repo.create_git_release(
+    repo.create_git_release(
         tag=release,
         name=release,
         message=release_changes_summary,
     )
-    print(f"Created release on GitHub {github_release}")


 def iter_pulls(
@@ -155,14 +148,15 @@ def update_git_repo(paths: list[Path], release: str) -> None:
     )
     repo.git.tag("-a", release, m=message)
     server = f"https://{GITHUB_TOKEN}@github.com/{GITHUB_REPO}.git"
-    print(f"Pushing changes to {GIT_BRANCH} branch of {GITHUB_REPO}")
     repo.git.push(server, GIT_BRANCH)
     repo.git.push("--tags", server, GIT_BRANCH)


 if __name__ == "__main__":
     if GITHUB_REPO is None:
-        raise RuntimeError("No github repo, please set the environment variable GITHUB_REPOSITORY")
+        msg = "No github repo, please set the environment variable GITHUB_REPOSITORY"
+        raise RuntimeError(msg)
     if GIT_BRANCH is None:
-        raise RuntimeError("No git branch set, please set the GITHUB_REF_NAME environment variable")
+        msg = "No git branch set, please set the GITHUB_REF_NAME environment variable"
+        raise RuntimeError(msg)
     main()
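
One additional pattern appears above: once the print that referenced github_release is gone, the assignment's value is never read, so the fix keeps only the call for its side effect. A small sketch of that shape (FakeRepo and both functions are stand-ins for this illustration only, not the PyGithub API):

    class FakeRepo:
        """Stand-in repository object used only for this example."""

        def create_git_release(self, tag):
            return f"release {tag}"


    def publish_before(repo):
        # Before: the result was stored only so a (now-deleted) print could show it,
        # leaving an unused binding behind.
        github_release = repo.create_git_release("2025.07.16")  # noqa: F841


    def publish_after(repo):
        # After: the call is kept for its side effect and the binding is dropped.
        repo.create_git_release("2025.07.16")


    if __name__ == "__main__":
        publish_before(FakeRepo())
        publish_after(FakeRepo())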

@@ -26,10 +26,8 @@ def main() -> None:
     # Add missing users to the JSON file
     contrib_file = ContributorsJSONFile()
     for author in recent_authors:
-        print(f"Checking if {author.login} should be added")
         if author.login not in contrib_file:
             contrib_file.add_contributor(author)
-            print(f"Added {author.login} to contributors")
     contrib_file.save()

     # Generate MD file from JSON file
@@ -98,5 +96,6 @@ def write_md_file(contributors):

 if __name__ == "__main__":
     if GITHUB_REPO is None:
-        raise RuntimeError("No github repo, please set the environment variable GITHUB_REPOSITORY")
+        msg = "No github repo, please set the environment variable GITHUB_REPOSITORY"
+        raise RuntimeError(msg)
     main()

@@ -271,7 +271,7 @@ def test_djlint_check_passes(cookies, context_override):


 @pytest.mark.parametrize(
-    ["use_docker", "expected_test_script"],
+    ("use_docker", "expected_test_script"),
     [
         ("n", "pytest"),
         ("y", "docker compose -f docker-compose.local.yml run django pytest"),
@@ -296,7 +296,7 @@ def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_scrip


 @pytest.mark.parametrize(
-    ["use_docker", "expected_test_script"],
+    ("use_docker", "expected_test_script"),
     [
         ("n", "pytest"),
         ("y", "docker compose -f docker-compose.local.yml run django pytest"),
@@ -323,7 +323,7 @@ def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expec


 @pytest.mark.parametrize(
-    ["use_docker", "expected_test_script"],
+    ("use_docker", "expected_test_script"),
     [
         ("n", "pytest"),
         ("y", "docker compose -f docker-compose.local.yml run django pytest"),
@@ -378,7 +378,7 @@ def test_error_if_incompatible(cookies, context, invalid_context):


 @pytest.mark.parametrize(
-    ["editor", "pycharm_docs_exist"],
+    ("editor", "pycharm_docs_exist"),
     [
         ("None", False),
         ("PyCharm", True),
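
The test changes above switch the parametrize argument names from a list to a tuple, the form that the pytest-style checks bundled in Ruff expect by default. A minimal runnable example in the same style (a standalone test, not one of the repository's):

    import pytest


    @pytest.mark.parametrize(
        ("use_docker", "expected_test_script"),
        [
            ("n", "pytest"),
            ("y", "docker compose -f docker-compose.local.yml run django pytest"),
        ],
    )
    def test_expected_script_is_set(use_docker, expected_test_script):
        # Each case pairs the cookiecutter answer with the command the CI config should run.
        assert use_docker in {"n", "y"}
        assert expected_test_script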