2015-11-09 01:46:38 +03:00
|
|
|
import os
|
|
|
|
import re
|
|
|
|
|
2015-11-09 01:05:32 +03:00
|
|
|
import pytest
|
2019-06-05 21:00:43 +03:00
|
|
|
from cookiecutter.exceptions import FailedHookException
|
2019-04-01 10:47:46 +03:00
|
|
|
import sh
|
|
|
|
import yaml
|
2015-11-09 01:46:38 +03:00
|
|
|
from binaryornot.check import is_binary
|
|
|
|
|
2019-05-27 18:54:08 +03:00
|
|
|
# Matches any unrendered Jinja2 cookiecutter expression left in a generated
# file, e.g. "{{cookiecutter.project_slug}}" or "{{ cookiecutter.foo }}"
# (\s? permits a single optional space before "cookiecutter").
PATTERN = r"{{(\s?cookiecutter)[.](.*?)}}"
# Pre-compiled once at import time; reused for every line of every file.
RE_OBJ = re.compile(PATTERN)
|
|
|
|
|
2015-11-09 01:05:32 +03:00
|
|
|
|
|
|
|
@pytest.fixture
def context():
    """Base cookiecutter context applied to every generated test project."""
    return dict(
        project_name="My Test Project",
        project_slug="my_test_project",
        author_name="Test Author",
        email="test@example.com",
        description="A short description of the project.",
        domain_name="example.com",
        version="0.1.0",
        timezone="UTC",
    )
|
|
|
|
|
|
|
|
|
2020-02-29 20:15:35 +03:00
|
|
|
# Context overrides that must each produce a project that bakes and renders
# successfully. Each dict is merged on top of the base ``context`` fixture and
# parametrizes the generation/lint tests below (see ``_fixture_id`` for ids).
SUPPORTED_COMBINATIONS = [
    {"open_source_license": "MIT"},
    {"open_source_license": "BSD"},
    {"open_source_license": "GPLv3"},
    {"open_source_license": "Apache Software License 2.0"},
    {"open_source_license": "Not open source"},
    {"windows": "y"},
    {"windows": "n"},
    {"use_pycharm": "y"},
    {"use_pycharm": "n"},
    {"use_docker": "y"},
    {"use_docker": "n"},
    {"postgresql_version": "14.1"},
    {"postgresql_version": "13.5"},
    {"postgresql_version": "12.9"},
    {"postgresql_version": "11.14"},
    {"postgresql_version": "10.19"},
    # cloud_provider / use_whitenoise pairings
    {"cloud_provider": "AWS", "use_whitenoise": "y"},
    {"cloud_provider": "AWS", "use_whitenoise": "n"},
    {"cloud_provider": "GCP", "use_whitenoise": "y"},
    {"cloud_provider": "GCP", "use_whitenoise": "n"},
    # With no cloud provider, whitenoise must be enabled (see note below)
    {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mailgun"},
    {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mailjet"},
    {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Mandrill"},
    {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Postmark"},
    {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Sendgrid"},
    {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "SendinBlue"},
    {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "SparkPost"},
    {"cloud_provider": "None", "use_whitenoise": "y", "mail_service": "Other SMTP"},
    # Note: cloud_provider=None AND use_whitenoise=n is not supported
    {"cloud_provider": "AWS", "mail_service": "Mailgun"},
    {"cloud_provider": "AWS", "mail_service": "Amazon SES"},
    {"cloud_provider": "AWS", "mail_service": "Mailjet"},
    {"cloud_provider": "AWS", "mail_service": "Mandrill"},
    {"cloud_provider": "AWS", "mail_service": "Postmark"},
    {"cloud_provider": "AWS", "mail_service": "Sendgrid"},
    {"cloud_provider": "AWS", "mail_service": "SendinBlue"},
    {"cloud_provider": "AWS", "mail_service": "SparkPost"},
    {"cloud_provider": "AWS", "mail_service": "Other SMTP"},
    {"cloud_provider": "GCP", "mail_service": "Mailgun"},
    {"cloud_provider": "GCP", "mail_service": "Mailjet"},
    {"cloud_provider": "GCP", "mail_service": "Mandrill"},
    {"cloud_provider": "GCP", "mail_service": "Postmark"},
    {"cloud_provider": "GCP", "mail_service": "Sendgrid"},
    {"cloud_provider": "GCP", "mail_service": "SendinBlue"},
    {"cloud_provider": "GCP", "mail_service": "SparkPost"},
    {"cloud_provider": "GCP", "mail_service": "Other SMTP"},
    # Note: cloud_providers GCP and None with mail_service Amazon SES is not supported
    {"use_async": "y"},
    {"use_async": "n"},
    {"use_drf": "y"},
    {"use_drf": "n"},
    {"js_task_runner": "None"},
    {"js_task_runner": "Gulp"},
    {"custom_bootstrap_compilation": "y"},
    {"custom_bootstrap_compilation": "n"},
    {"use_compressor": "y"},
    {"use_compressor": "n"},
    {"use_celery": "y"},
    {"use_celery": "n"},
    {"use_mailhog": "y"},
    {"use_mailhog": "n"},
    {"use_sentry": "y"},
    {"use_sentry": "n"},
    {"use_whitenoise": "y"},
    {"use_whitenoise": "n"},
    {"use_heroku": "y"},
    {"use_heroku": "n"},
    {"ci_tool": "None"},
    {"ci_tool": "Travis"},
    {"ci_tool": "Gitlab"},
    {"ci_tool": "Github"},
    {"keep_local_envs_in_vcs": "y"},
    {"keep_local_envs_in_vcs": "n"},
    {"debug": "y"},
    {"debug": "n"},
]
|
|
|
|
|
2020-03-14 20:29:08 +03:00
|
|
|
# Context overrides that the pre-generation hook must reject: baking with any
# of these must exit non-zero with a FailedHookException
# (see test_error_if_incompatible).
UNSUPPORTED_COMBINATIONS = [
    {"cloud_provider": "None", "use_whitenoise": "n"},
    {"cloud_provider": "GCP", "mail_service": "Amazon SES"},
    {"cloud_provider": "None", "mail_service": "Amazon SES"},
]
|
2020-02-29 20:15:35 +03:00
|
|
|
|
|
|
|
|
|
|
|
def _fixture_id(ctx):
|
|
|
|
"""Helper to get a user friendly test name from the parametrized context."""
|
|
|
|
return "-".join(f"{key}:{value}" for key, value in ctx.items())
|
2019-04-01 10:47:46 +03:00
|
|
|
|
|
|
|
|
2015-11-09 01:46:38 +03:00
|
|
|
def build_files_list(root_dir):
    """Build a list containing absolute paths to the generated files.

    Walks ``root_dir`` recursively and joins each file name onto the
    directory it was found in.
    """
    return [
        os.path.join(dirpath, file_path)
        # _subdirs is unused; the underscore marks it as intentionally ignored
        for dirpath, _subdirs, files in os.walk(root_dir)
        for file_path in files
    ]
|
|
|
|
|
2015-11-09 02:02:07 +03:00
|
|
|
|
2015-11-09 01:46:38 +03:00
|
|
|
def check_paths(paths):
    """Method to check all paths have correct substitutions.

    Binary files are skipped; every line of each text file must contain no
    unrendered ``{{ cookiecutter.* }}`` expression.
    """
    # Assert that no match is found in any of the files
    for path in paths:
        if is_binary(path):
            continue

        # Use a context manager so the handle is closed even when the
        # assertion below fails (the original bare open() leaked it).
        with open(path, "r") as text_file:
            for line in text_file:
                match = RE_OBJ.search(line)
                assert match is None, f"cookiecutter variable not replaced in {path}"
|
2015-11-09 01:46:38 +03:00
|
|
|
|
|
|
|
|
2020-02-29 20:15:35 +03:00
|
|
|
@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
def test_project_generation(cookies, context, context_override):
    """Test that project is generated and fully rendered."""
    baked = cookies.bake(extra_context={**context, **context_override})
    assert baked.exit_code == 0
    assert baked.exception is None
    assert baked.project.basename == context["project_slug"]
    assert baked.project.isdir()

    generated_files = build_files_list(str(baked.project))
    assert generated_files
    check_paths(generated_files)
|
2015-11-09 01:59:10 +03:00
|
|
|
|
|
|
|
|
2020-02-29 20:15:35 +03:00
|
|
|
@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
def test_flake8_passes(cookies, context_override):
    """Generated project should pass flake8."""
    baked = cookies.bake(extra_context=context_override)

    try:
        sh.flake8(_cwd=str(baked.project))
    except sh.ErrorReturnCode as error:
        pytest.fail(error.stdout.decode())
|
2019-05-15 14:37:17 +03:00
|
|
|
|
|
|
|
|
2020-02-29 20:15:35 +03:00
|
|
|
@pytest.mark.parametrize("context_override", SUPPORTED_COMBINATIONS, ids=_fixture_id)
def test_black_passes(cookies, context_override):
    """Generated project should pass black."""
    baked = cookies.bake(extra_context=context_override)

    black_args = ("--check", "--diff", "--exclude", "migrations")
    try:
        sh.black(*black_args, _cwd=str(baked.project))
    except sh.ErrorReturnCode as error:
        pytest.fail(error.stdout.decode())
|
2019-04-01 10:47:46 +03:00
|
|
|
|
Add missing `script` key to Travis CI config (#1950)
* Add failing test for travis.yml
I see three options to test travis.yml :
1. Testing that the YAML contains relevant value. Least useful and least
reliable, but simplest to implement.
2. Testing that the YAML is valid TravisCI YAML. Unfortunately this is
difficult / impossible. Doing 'travis lint' would succeed, this command
does not check for 'script' key presence and wouldn't be useful for us.
We could use 'travis-build' to verify that the YAML can be converted to
a worker config, but as of now 'travis-build' doesn't work out of the
box.
There is a new tool for validating travis YAML files 'travis-yml', but
as of now it's a ruby-only library and it's still a work in progress.
3. Running Travis CI task based on the generated YAML. This seems the
best approach, however since cookiecutter-django itself uses Travis CI,
that would require running Travis CI from within Travis CI.
Scheduling Travis CI job without a github push still requires a public
github repo, which is something that we can't generate on demand.
Given that I'm opting to use approach 1.
* Adds missing config to generated .travis.yml
The keys added are as follows:
1. 'script'
Required by Travis, cookiecutter-django used to provide it until it has
been removed together with hitch.
I'm assuming hitch has been replaced with pytest, I'm setting pytest as
the new value for the 'script' key.
2. 'install'
Not required by Travis, but necessary in our case; installs test
libraries, mostly pytest.
As of now this points to 'local.txt' requirements file. There used to be
a separate 'test.txt' requirements file but it has been decided to merge
it with 'local.txt', see discussion in
https://github.com/pydanny/cookiecutter-django/pull/1557 .
* Update CONTRIBUTORS.rst
2019-03-06 04:10:45 +03:00
|
|
|
|
2020-04-16 20:34:12 +03:00
|
|
|
@pytest.mark.parametrize(
    ["use_docker", "expected_test_script"],
    [
        ("n", "pytest"),
        ("y", "docker-compose -f local.yml run django pytest"),
    ],
)
def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_script):
    """Generated .travis.yml should lint with flake8 then run the test script."""
    context.update({"ci_tool": "Travis", "use_docker": use_docker})
    baked = cookies.bake(extra_context=context)

    assert baked.exit_code == 0
    assert baked.exception is None
    assert baked.project.basename == context["project_slug"]
    assert baked.project.isdir()

    with open(f"{baked.project}/.travis.yml", "r") as travis_yml:
        try:
            jobs = yaml.safe_load(travis_yml)["jobs"]["include"]
        except yaml.YAMLError as error:
            pytest.fail(str(error))
        else:
            # First job lints, second job runs the tests.
            assert jobs[0]["script"] == ["flake8"]
            assert jobs[1]["script"] == [expected_test_script]
|
2019-06-05 21:00:43 +03:00
|
|
|
|
|
|
|
|
2020-04-16 20:34:12 +03:00
|
|
|
@pytest.mark.parametrize(
    ["use_docker", "expected_test_script"],
    [
        ("n", "pytest"),
        ("y", "docker-compose -f local.yml run django pytest"),
    ],
)
def test_gitlab_invokes_flake8_and_pytest(
    cookies, context, use_docker, expected_test_script
):
    """Generated .gitlab-ci.yml should run flake8 and the expected test script."""
    context.update({"ci_tool": "Gitlab", "use_docker": use_docker})
    result = cookies.bake(extra_context=context)

    assert result.exit_code == 0
    assert result.exception is None
    assert result.project.basename == context["project_slug"]
    assert result.project.isdir()

    with open(f"{result.project}/.gitlab-ci.yml", "r") as gitlab_yml:
        try:
            gitlab_config = yaml.safe_load(gitlab_yml)
            assert gitlab_config["flake8"]["script"] == ["flake8"]
            assert gitlab_config["pytest"]["script"] == [expected_test_script]
        except yaml.YAMLError as e:
            # pytest.fail() requires a string message; passing the exception
            # object itself (as the old code did) raises a TypeError that
            # masks the real YAML error. Matches test_travis_invokes_pytest.
            pytest.fail(str(e))
|
|
|
|
|
|
|
|
|
2020-09-21 13:20:24 +03:00
|
|
|
@pytest.mark.parametrize(
    ["use_docker", "expected_test_script"],
    [
        ("n", "pytest"),
        ("y", "docker-compose -f local.yml run django pytest"),
    ],
)
def test_github_invokes_linter_and_pytest(
    cookies, context, use_docker, expected_test_script
):
    """Generated GitHub workflow should run a pre-commit linter and pytest."""
    context.update({"ci_tool": "Github", "use_docker": use_docker})
    result = cookies.bake(extra_context=context)

    assert result.exit_code == 0
    assert result.exception is None
    assert result.project.basename == context["project_slug"]
    assert result.project.isdir()

    with open(f"{result.project}/.github/workflows/ci.yml", "r") as github_yml:
        try:
            github_config = yaml.safe_load(github_yml)

            # The "linter" job must contain at least one step that uses a
            # pre-commit action.
            linter_present = False
            for action_step in github_config["jobs"]["linter"]["steps"]:
                if action_step.get("uses", "NA").startswith("pre-commit"):
                    linter_present = True
            assert linter_present

            # The "pytest" job must contain a step running the expected
            # test command.
            expected_test_script_present = False
            for action_step in github_config["jobs"]["pytest"]["steps"]:
                if action_step.get("run") == expected_test_script:
                    expected_test_script_present = True
            assert expected_test_script_present
        except yaml.YAMLError as e:
            # pytest.fail() requires a string message; passing the exception
            # object itself (as the old code did) raises a TypeError that
            # masks the real YAML error. Matches test_travis_invokes_pytest.
            pytest.fail(str(e))
|
|
|
|
|
|
|
|
|
2019-06-05 21:00:43 +03:00
|
|
|
@pytest.mark.parametrize("slug", ["project slug", "Project_Slug"])
def test_invalid_slug(cookies, context, slug):
    """An invalid slug should fail the pre-generation hook."""
    context.update({"project_slug": slug})

    baked = cookies.bake(extra_context=context)

    assert baked.exit_code != 0
    assert isinstance(baked.exception, FailedHookException)
|
2019-10-02 18:03:33 +03:00
|
|
|
|
|
|
|
|
2020-02-29 20:15:35 +03:00
|
|
|
@pytest.mark.parametrize("invalid_context", UNSUPPORTED_COMBINATIONS)
def test_error_if_incompatible(cookies, context, invalid_context):
    """It should not generate a project when an incompatible combination is selected."""
    context.update(invalid_context)

    result = cookies.bake(extra_context=context)

    # The pre-generation hook rejects the combination before any files exist.
    assert result.exit_code != 0
    assert isinstance(result.exception, FailedHookException)
|
2021-04-26 14:17:02 +03:00
|
|
|
|
|
|
|
|
|
|
|
@pytest.mark.parametrize(
    ["use_pycharm", "pycharm_docs_exist"],
    [
        ("n", False),
        ("y", True),
    ],
)
def test_pycharm_docs_removed(cookies, context, use_pycharm, pycharm_docs_exist):
    """PyCharm docs are only referenced in the index when use_pycharm is "y"."""
    context.update({"use_pycharm": use_pycharm})
    result = cookies.bake(extra_context=context)
    # Fail fast with a clear message if generation itself broke, instead of
    # an obscure FileNotFoundError from the open() below.
    assert result.exit_code == 0

    with open(f"{result.project}/docs/index.rst", "r") as f:
        has_pycharm_docs = "pycharm/configuration" in f.read()
        assert has_pycharm_docs is pycharm_docs_exist
|