mirror of https://github.com/cookiecutter/cookiecutter-django.git (synced 2025-08-15 17:34:52 +03:00)

Merge branch 'master' into feat/webpack-rebased

commit 8e43fd4f29
@@ -21,7 +21,7 @@ repos:
       - id: black
 
   - repo: https://github.com/PyCQA/isort
-    rev: 5.11.4
+    rev: 5.12.0
     hooks:
       - id: isort
 
@@ -3,6 +3,11 @@ All enhancements and patches to Cookiecutter Django will be documented in this f
 
 <!-- GENERATOR_PLACEHOLDER -->
 
+## 2023.01.27
+
+### Updated
+- Update django-stubs to 1.14.0 ([#4103](https://github.com/cookiecutter/cookiecutter-django/pull/4103))
+
 ## 2023.01.26
 
 ### Changed
@@ -386,6 +386,7 @@ def set_flags_in_settings_files():
 def remove_envs_and_associated_files():
     shutil.rmtree(".envs")
     os.remove("merge_production_dotenvs_in_dotenv.py")
+    shutil.rmtree("tests")
 
 
 def remove_celery_compose_dirs():
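For reference, the hook touched here (presumably hooks/post_gen_project.py, since the hunk context shows set_flags_in_settings_files) now also deletes the generated project's tests/ directory, which is where the dotenv-merge test added later in this commit lives. A minimal sketch of the function after the change, with the imports it relies on; the exact condition under which the hook calls it is outside this hunk:

import os
import shutil


def remove_envs_and_associated_files():
    # Post-generation cleanup: drop the .envs/ tree, the dotenv merge
    # helper script, and the tests/ directory that exercises it.
    shutil.rmtree(".envs")
    os.remove("merge_production_dotenvs_in_dotenv.py")
    shutil.rmtree("tests")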
@@ -5,7 +5,7 @@ binaryornot==0.4.4
 # Code quality
 # ------------------------------------------------------------------------------
 black==22.12.0
-isort==5.11.4
+isort==5.12.0
 flake8==6.0.0
 flake8-isort==6.0.0
 pre-commit==3.0.1
setup.py (2 lines changed)
@@ -5,7 +5,7 @@ except ImportError:
     from distutils.core import setup
 
 # We use calendar versioning
-version = "2023.01.26"
+version = "2023.01.27"
 
 with open("README.rst") as readme_file:
     long_description = readme_file.read()
@@ -21,7 +21,7 @@ repos:
       - id: black
 
   - repo: https://github.com/PyCQA/isort
-    rev: 5.11.4
+    rev: 5.12.0
     hooks:
      - id: isort
 
@@ -2,66 +2,25 @@ import os
 from collections.abc import Sequence
 from pathlib import Path
 
-import pytest
-
 BASE_DIR = Path(__file__).parent.resolve()
-PRODUCTION_DOTENVS_DIR_PATH = BASE_DIR / ".envs" / ".production"
-PRODUCTION_DOTENV_FILE_PATHS = [
-    PRODUCTION_DOTENVS_DIR_PATH / ".django",
-    PRODUCTION_DOTENVS_DIR_PATH / ".postgres",
+PRODUCTION_DOTENVS_DIR = BASE_DIR / ".envs" / ".production"
+PRODUCTION_DOTENV_FILES = [
+    PRODUCTION_DOTENVS_DIR / ".django",
+    PRODUCTION_DOTENVS_DIR / ".postgres",
 ]
-DOTENV_FILE_PATH = BASE_DIR / ".env"
+DOTENV_FILE = BASE_DIR / ".env"
 
 
 def merge(
-    output_file_path: str, merged_file_paths: Sequence[str], append_linesep: bool = True
+    output_file: Path,
+    files_to_merge: Sequence[Path],
 ) -> None:
-    with open(output_file_path, "w") as output_file:
-        for merged_file_path in merged_file_paths:
-            with open(merged_file_path) as merged_file:
-                merged_file_content = merged_file.read()
-                output_file.write(merged_file_content)
-                if append_linesep:
-                    output_file.write(os.linesep)
-
-
-def main():
-    merge(DOTENV_FILE_PATH, PRODUCTION_DOTENV_FILE_PATHS)
-
-
-@pytest.mark.parametrize("merged_file_count", range(3))
-@pytest.mark.parametrize("append_linesep", [True, False])
-def test_merge(tmpdir_factory, merged_file_count: int, append_linesep: bool):
-    tmp_dir_path = Path(str(tmpdir_factory.getbasetemp()))
-
-    output_file_path = tmp_dir_path / ".env"
-
-    expected_output_file_content = ""
-    merged_file_paths = []
-    for i in range(merged_file_count):
-        merged_file_ord = i + 1
-
-        merged_filename = f".service{merged_file_ord}"
-        merged_file_path = tmp_dir_path / merged_filename
-
-        merged_file_content = merged_filename * merged_file_ord
-
-        with open(merged_file_path, "w+") as file:
-            file.write(merged_file_content)
-
-        expected_output_file_content += merged_file_content
-        if append_linesep:
-            expected_output_file_content += os.linesep
-
-        merged_file_paths.append(merged_file_path)
-
-    merge(output_file_path, merged_file_paths, append_linesep)
-
-    with open(output_file_path) as output_file:
-        actual_output_file_content = output_file.read()
-
-    assert actual_output_file_content == expected_output_file_content
+    merged_content = ""
+    for merge_file in files_to_merge:
+        merged_content += merge_file.read_text()
+        merged_content += os.linesep
+    output_file.write_text(merged_content)
 
 
 if __name__ == "__main__":
-    main()
+    merge(DOTENV_FILE, PRODUCTION_DOTENV_FILES)
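The rewritten helper takes Path objects, drops the append_linesep flag, and always appends os.linesep after each source file's text, so a source that already ends in a newline produces a blank line in the output (exactly the "\n\n" cases the new test below exercises). A minimal, self-contained sketch of that behaviour: merge() is copied from the hunk above, while the temp-directory layout and file names are illustrative only, not the real .envs structure:

import os
import tempfile
from collections.abc import Sequence
from pathlib import Path


def merge(output_file: Path, files_to_merge: Sequence[Path]) -> None:
    merged_content = ""
    for merge_file in files_to_merge:
        merged_content += merge_file.read_text()
        merged_content += os.linesep  # one separator per source file, even an empty one
    output_file.write_text(merged_content)


with tempfile.TemporaryDirectory() as tmp:
    tmp_path = Path(tmp)
    django_env = tmp_path / ".django"      # stand-in for .envs/.production/.django
    postgres_env = tmp_path / ".postgres"  # stand-in for .envs/.production/.postgres
    django_env.write_text("DJANGO_DEBUG=False")
    postgres_env.write_text("POSTGRES_DB=app")

    dotenv = tmp_path / ".env"
    merge(dotenv, [django_env, postgres_env])
    # On Linux/macOS (os.linesep == "\n") this prints 'DJANGO_DEBUG=False\nPOSTGRES_DB=app\n'
    print(repr(dotenv.read_text()))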
@@ -0,0 +1,34 @@
+from pathlib import Path
+
+import pytest
+
+from merge_production_dotenvs_in_dotenv import merge
+
+
+@pytest.mark.parametrize(
+    ("input_contents", "expected_output"),
+    [
+        ([], ""),
+        ([""], "\n"),
+        (["JANE=doe"], "JANE=doe\n"),
+        (["SEP=true", "AR=ator"], "SEP=true\nAR=ator\n"),
+        (["A=0", "B=1", "C=2"], "A=0\nB=1\nC=2\n"),
+        (["X=x\n", "Y=y", "Z=z\n"], "X=x\n\nY=y\nZ=z\n\n"),
+    ],
+)
+def test_merge(
+    tmp_path: Path,
+    input_contents: list[str],
+    expected_output: str,
+):
+    output_file = tmp_path / ".env"
+
+    files_to_merge = []
+    for num, input_content in enumerate(input_contents, start=1):
+        merge_file = tmp_path / f".service{num}"
+        merge_file.write_text(input_content)
+        files_to_merge.append(merge_file)
+
+    merge(output_file, files_to_merge)
+
+    assert output_file.read_text() == expected_output
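The pytest-based test that used to live inside the merge script now sits in this standalone module. Its path is not shown in this view; given the bare `from merge_production_dotenvs_in_dotenv import merge` it presumably runs with that script on the import path. A hedged sketch of invoking it programmatically; the filename is assumed from the import above, not taken from the diff:

import sys

import pytest

# Run only the new dotenv-merge tests; equivalent to `pytest -q <file>` on the CLI.
sys.exit(pytest.main(["-q", "test_merge_production_dotenvs_in_dotenv.py"]))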