Mirror of https://github.com/cookiecutter/cookiecutter-django.git, synced 2025-07-16 19:12:25 +03:00
Run Ruff with --add-noqa
This commit is contained in:
parent 9297598613
commit 7d1e843d93

88  docs/conf.py
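
The changes below were produced by the tool rather than written by hand: Ruff's --add-noqa flag appends a targeted "# noqa: <rule>" suppression to every line that currently violates an enabled rule (ERA001 for commented-out code, the PTH rules for os.path usage, PLR0133 for comparisons between constants, DTZ005 for naive datetime.now() calls, and so on), so the existing code keeps passing lint with no behaviour change. A minimal sketch of the workflow, assuming the rule selection already lives in the project's Ruff configuration (typically pyproject.toml):

    # report the current violations
    ruff check .

    # append "# noqa: <rule>" to each offending line, producing a diff like the one below
    ruff check . --add-noqa

Individual suppressions can then be deleted one rule at a time as the underlying issues are actually fixed.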
@@ -9,17 +9,17 @@
 # serve to show the default.
 from datetime import datetime

-now = datetime.now()
+now = datetime.now()  # noqa: DTZ005

 # If extensions (or modules to document with autodoc) are in another directory,
 # add these directories to sys.path here. If the directory is relative to the
 # documentation root, use os.path.abspath to make it absolute, like shown here.
-# sys.path.insert(0, os.path.abspath('.'))
+# sys.path.insert(0, os.path.abspath('.'))  # noqa: ERA001

 # -- General configuration -----------------------------------------------------

 # If your documentation needs a minimal Sphinx version, state it here.
-# needs_sphinx = '1.0'
+# needs_sphinx = '1.0'  # noqa: ERA001

 # Add any Sphinx extension module names here, as strings. They can be extensions
 # coming with Sphinx (named 'sphinx.ext.*') or your custom ones.
@@ -35,14 +35,14 @@ source_suffix = {
 }

 # The encoding of source files.
-# source_encoding = 'utf-8-sig'
+# source_encoding = 'utf-8-sig'  # noqa: ERA001

 # The master toctree document.
 master_doc = "index"

 # General information about the project.
 project = "Cookiecutter Django"
-copyright = f"2013-{now.year}, Daniel Roy Greenfeld"
+copyright = f"2013-{now.year}, Daniel Roy Greenfeld"  # noqa: A001

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
@@ -55,37 +55,37 @@ release = "{}.{}.{}".format(*now.isocalendar())

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.
-# language = None
+# language = None  # noqa: ERA001

 # There are two options for replacing |today|: either, you set today to some
 # non-false value, then it is used:
-# today = ''
+# today = ''  # noqa: ERA001
 # Else, today_fmt is used as the format for a strftime call.
-# today_fmt = '%B %d, %Y'
+# today_fmt = '%B %d, %Y'  # noqa: ERA001

 # List of patterns, relative to source directory, that match files and
 # directories to ignore when looking for source files.
 exclude_patterns = ["_build"]

 # The reST default role (used for this markup: `text`) to use for all documents.
-# default_role = None
+# default_role = None  # noqa: ERA001

 # If true, '()' will be appended to :func: etc. cross-reference text.
-# add_function_parentheses = True
+# add_function_parentheses = True  # noqa: ERA001

 # If true, the current module name will be prepended to all description
 # unit titles (such as .. function::).
-# add_module_names = True
+# add_module_names = True  # noqa: ERA001

 # If true, sectionauthor and moduleauthor directives will be shown in the
 # output. They are ignored by default.
-# show_authors = False
+# show_authors = False  # noqa: ERA001

 # The name of the Pygments (syntax highlighting) style to use.
 pygments_style = "sphinx"

 # A list of ignored prefixes for module index sorting.
-# modindex_common_prefix = []
+# modindex_common_prefix = []  # noqa: ERA001


 # -- Options for HTML output ---------------------------------------------------
@@ -97,26 +97,26 @@ html_theme = "sphinx_rtd_theme"
 # Theme options are theme-specific and customize the look and feel of a theme
 # further. For a list of options available for each theme, see the
 # documentation.
-# html_theme_options = {}
+# html_theme_options = {}  # noqa: ERA001

 # Add any paths that contain custom themes here, relative to this directory.
-# html_theme_path = []
+# html_theme_path = []  # noqa: ERA001

 # The name for this set of Sphinx documents. If None, it defaults to
 # "<project> v<release> documentation".
-# html_title = None
+# html_title = None  # noqa: ERA001

 # A shorter title for the navigation bar. Default is the same as html_title.
-# html_short_title = None
+# html_short_title = None  # noqa: ERA001

 # The name of an image file (relative to this directory) to place at the top
 # of the sidebar.
-# html_logo = None
+# html_logo = None  # noqa: ERA001

 # The name of an image file (within the static path) to use as favicon of the
 # docs. This file should be a Windows icon file (.ico) being 16x16 or 32x32
 # pixels large.
-# html_favicon = None
+# html_favicon = None  # noqa: ERA001

 # Add any paths that contain custom static files (such as style sheets) here,
 # relative to this directory. They are copied after the builtin static files,
@@ -125,44 +125,44 @@ html_static_path = ["_static"]

 # If not '', a 'Last updated on:' timestamp is inserted at every page bottom,
 # using the given strftime format.
-# html_last_updated_fmt = '%b %d, %Y'
+# html_last_updated_fmt = '%b %d, %Y'  # noqa: ERA001

 # If true, SmartyPants will be used to convert quotes and dashes to
 # typographically correct entities.
-# html_use_smartypants = True
+# html_use_smartypants = True  # noqa: ERA001

 # Custom sidebar templates, maps document names to template names.
-# html_sidebars = {}
+# html_sidebars = {}  # noqa: ERA001

 # Additional templates that should be rendered to pages, maps page names to
 # template names.
-# html_additional_pages = {}
+# html_additional_pages = {}  # noqa: ERA001

 # If false, no module index is generated.
-# html_domain_indices = True
+# html_domain_indices = True  # noqa: ERA001

 # If false, no index is generated.
-# html_use_index = True
+# html_use_index = True  # noqa: ERA001

 # If true, the index is split into individual pages for each letter.
-# html_split_index = False
+# html_split_index = False  # noqa: ERA001

 # If true, links to the reST sources are added to the pages.
-# html_show_sourcelink = True
+# html_show_sourcelink = True  # noqa: ERA001

 # If true, "Created using Sphinx" is shown in the HTML footer. Default is True.
-# html_show_sphinx = True
+# html_show_sphinx = True  # noqa: ERA001

 # If true, "(C) Copyright ..." is shown in the HTML footer. Default is True.
-# html_show_copyright = True
+# html_show_copyright = True  # noqa: ERA001

 # If true, an OpenSearch description file will be output, and all pages will
 # contain a <link> tag referring to it. The value of this option must be the
 # base URL from which the finished HTML is served.
-# html_use_opensearch = ''
+# html_use_opensearch = ''  # noqa: ERA001

 # This is the file name suffix for HTML files (e.g. ".xhtml").
-# html_file_suffix = None
+# html_file_suffix = None  # noqa: ERA001

 # Output file base name for HTML help builder.
 htmlhelp_basename = "cookiecutter-djangodoc"
@@ -172,11 +172,11 @@ htmlhelp_basename = "cookiecutter-djangodoc"

 latex_elements = {
     # The paper size ('letterpaper' or 'a4paper').
-    # 'papersize': 'letterpaper',
+    # 'papersize': 'letterpaper',  # noqa: ERA001
     # The font size ('10pt', '11pt' or '12pt').
-    # 'pointsize': '10pt',
+    # 'pointsize': '10pt',  # noqa: ERA001
     # Additional stuff for the LaTeX preamble.
-    # 'preamble': '',
+    # 'preamble': '',  # noqa: ERA001
 }

 # Grouping the document tree into LaTeX files. List of tuples
@@ -193,23 +193,23 @@ latex_documents = [

 # The name of an image file (relative to this directory) to place at the top of
 # the title page.
-# latex_logo = None
+# latex_logo = None  # noqa: ERA001

 # For "manual" documents, if this is true, then toplevel headings are parts,
 # not chapters.
-# latex_use_parts = False
+# latex_use_parts = False  # noqa: ERA001

 # If true, show page references after internal links.
-# latex_show_pagerefs = False
+# latex_show_pagerefs = False  # noqa: ERA001

 # If true, show URL addresses after external links.
-# latex_show_urls = False
+# latex_show_urls = False  # noqa: ERA001

 # Documents to append as an appendix to all manuals.
-# latex_appendices = []
+# latex_appendices = []  # noqa: ERA001

 # If false, no module index is generated.
-# latex_domain_indices = True
+# latex_domain_indices = True  # noqa: ERA001


 # -- Options for manual page output --------------------------------------------
@@ -227,7 +227,7 @@ man_pages = [
 ]

 # If true, show URL addresses after external links.
-# man_show_urls = False
+# man_show_urls = False  # noqa: ERA001


 # -- Options for Texinfo output ------------------------------------------------
@@ -248,10 +248,10 @@ texinfo_documents = [
 ]

 # Documents to append as an appendix to all manuals.
-# texinfo_appendices = []
+# texinfo_appendices = []  # noqa: ERA001

 # If false, no module index is generated.
-# texinfo_domain_indices = True
+# texinfo_domain_indices = True  # noqa: ERA001

 # How to display URL addresses: 'footnote', 'no', or 'inline'.
-# texinfo_show_urls = 'footnote'
+# texinfo_show_urls = 'footnote'  # noqa: ERA001

@@ -24,25 +24,25 @@ DEBUG_VALUE = "debug"
 def remove_open_source_files():
     file_names = ["CONTRIBUTORS.txt", "LICENSE"]
     for file_name in file_names:
-        os.remove(file_name)
+        os.remove(file_name)  # noqa: PTH107


 def remove_gplv3_files():
     file_names = ["COPYING"]
     for file_name in file_names:
-        os.remove(file_name)
+        os.remove(file_name)  # noqa: PTH107


 def remove_custom_user_manager_files():
-    os.remove(
-        os.path.join(
+    os.remove(  # noqa: PTH107
+        os.path.join(  # noqa: PTH118
             "{{cookiecutter.project_slug}}",
             "users",
             "managers.py",
         ),
     )
-    os.remove(
-        os.path.join(
+    os.remove(  # noqa: PTH107
+        os.path.join(  # noqa: PTH118
             "{{cookiecutter.project_slug}}",
             "users",
             "tests",
@@ -53,11 +53,11 @@ def remove_custom_user_manager_files():

 def remove_pycharm_files():
     idea_dir_path = ".idea"
-    if os.path.exists(idea_dir_path):
+    if os.path.exists(idea_dir_path):  # noqa: PTH110
         shutil.rmtree(idea_dir_path)

-    docs_dir_path = os.path.join("docs", "pycharm")
-    if os.path.exists(docs_dir_path):
+    docs_dir_path = os.path.join("docs", "pycharm")  # noqa: PTH118
+    if os.path.exists(docs_dir_path):  # noqa: PTH110
         shutil.rmtree(docs_dir_path)


@@ -71,15 +71,15 @@ def remove_docker_files():
         ".dockerignore",
     ]
     for file_name in file_names:
-        os.remove(file_name)
-    if "{{ cookiecutter.editor }}" == "PyCharm":
+        os.remove(file_name)  # noqa: PTH107
+    if "{{ cookiecutter.editor }}" == "PyCharm":  # noqa: PLR0133
         file_names = ["docker_compose_up_django.xml", "docker_compose_up_docs.xml"]
         for file_name in file_names:
-            os.remove(os.path.join(".idea", "runConfigurations", file_name))
+            os.remove(os.path.join(".idea", "runConfigurations", file_name))  # noqa: PTH107, PTH118


 def remove_nginx_docker_files():
-    shutil.rmtree(os.path.join("compose", "production", "nginx"))
+    shutil.rmtree(os.path.join("compose", "production", "nginx"))  # noqa: PTH118


 def remove_utility_files():
@@ -92,18 +92,18 @@ def remove_heroku_files():
         if file_name == "requirements.txt" and "{{ cookiecutter.ci_tool }}".lower() == "travis":
             # don't remove the file if we are using travisci but not using heroku
             continue
-        os.remove(file_name)
+        os.remove(file_name)  # noqa: PTH107
     shutil.rmtree("bin")


 def remove_sass_files():
-    shutil.rmtree(os.path.join("{{cookiecutter.project_slug}}", "static", "sass"))
+    shutil.rmtree(os.path.join("{{cookiecutter.project_slug}}", "static", "sass"))  # noqa: PTH118


 def remove_gulp_files():
     file_names = ["gulpfile.mjs"]
     for file_name in file_names:
-        os.remove(file_name)
+        os.remove(file_name)  # noqa: PTH107


 def remove_webpack_files():
@@ -112,34 +112,34 @@ def remove_webpack_files():


 def remove_vendors_js():
-    vendors_js_path = os.path.join(
+    vendors_js_path = os.path.join(  # noqa: PTH118
         "{{ cookiecutter.project_slug }}",
         "static",
         "js",
         "vendors.js",
     )
-    if os.path.exists(vendors_js_path):
-        os.remove(vendors_js_path)
+    if os.path.exists(vendors_js_path):  # noqa: PTH110
+        os.remove(vendors_js_path)  # noqa: PTH107


 def remove_packagejson_file():
     file_names = ["package.json"]
     for file_name in file_names:
-        os.remove(file_name)
+        os.remove(file_name)  # noqa: PTH107


 def update_package_json(remove_dev_deps=None, remove_keys=None, scripts=None):
     remove_dev_deps = remove_dev_deps or []
     remove_keys = remove_keys or []
     scripts = scripts or {}
-    with open("package.json") as fd:
+    with open("package.json") as fd:  # noqa: PTH123
         content = json.load(fd)
     for package_name in remove_dev_deps:
         content["devDependencies"].pop(package_name)
     for key in remove_keys:
         content.pop(key)
     content["scripts"].update(scripts)
-    with open("package.json", mode="w") as fd:
+    with open("package.json", mode="w") as fd:  # noqa: PTH123
         json.dump(content, fd, ensure_ascii=False, indent=2)
         fd.write("\n")

@@ -205,7 +205,7 @@ def handle_js_runner(choice, use_docker, use_async):


 def remove_prettier_pre_commit():
-    with open(".pre-commit-config.yaml") as fd:
+    with open(".pre-commit-config.yaml") as fd:  # noqa: PTH123
         content = fd.readlines()

     removing = False
@@ -218,35 +218,35 @@ def remove_prettier_pre_commit():
         if not removing:
             new_lines.append(line)

-    with open(".pre-commit-config.yaml", "w") as fd:
+    with open(".pre-commit-config.yaml", "w") as fd:  # noqa: PTH123
         fd.writelines(new_lines)


 def remove_celery_files():
     file_names = [
-        os.path.join("config", "celery_app.py"),
-        os.path.join("{{ cookiecutter.project_slug }}", "users", "tasks.py"),
-        os.path.join("{{ cookiecutter.project_slug }}", "users", "tests", "test_tasks.py"),
+        os.path.join("config", "celery_app.py"),  # noqa: PTH118
+        os.path.join("{{ cookiecutter.project_slug }}", "users", "tasks.py"),  # noqa: PTH118
+        os.path.join("{{ cookiecutter.project_slug }}", "users", "tests", "test_tasks.py"),  # noqa: PTH118
     ]
     for file_name in file_names:
-        os.remove(file_name)
+        os.remove(file_name)  # noqa: PTH107


 def remove_async_files():
     file_names = [
-        os.path.join("config", "asgi.py"),
-        os.path.join("config", "websocket.py"),
+        os.path.join("config", "asgi.py"),  # noqa: PTH118
+        os.path.join("config", "websocket.py"),  # noqa: PTH118
     ]
     for file_name in file_names:
-        os.remove(file_name)
+        os.remove(file_name)  # noqa: PTH107


 def remove_dottravisyml_file():
-    os.remove(".travis.yml")
+    os.remove(".travis.yml")  # noqa: PTH107


 def remove_dotgitlabciyml_file():
-    os.remove(".gitlab-ci.yml")
+    os.remove(".gitlab-ci.yml")  # noqa: PTH107


 def remove_dotgithub_folder():
@@ -254,10 +254,10 @@ def remove_dotgithub_folder():


 def remove_dotdrone_file():
-    os.remove(".drone.yml")
+    os.remove(".drone.yml")  # noqa: PTH107


-def generate_random_string(length, using_digits=False, using_ascii_letters=False, using_punctuation=False):
+def generate_random_string(length, using_digits=False, using_ascii_letters=False, using_punctuation=False):  # noqa: FBT002
     """
     Example:
         opting out for 50 symbol-long, [a-z][A-Z][0-9] string
@@ -289,7 +289,7 @@ def set_flag(file_path, flag, value=None, formatted=None, *args, **kwargs):
             random_string = formatted.format(random_string)
         value = random_string

-    with open(file_path, "r+") as f:
+    with open(file_path, "r+") as f:  # noqa: PTH123
         file_contents = f.read().replace(flag, value)
         f.seek(0)
         f.write(file_contents)
@@ -323,7 +323,7 @@ def generate_random_user():
     return generate_random_string(length=32, using_ascii_letters=True)


-def generate_postgres_user(debug=False):
+def generate_postgres_user(debug=False):  # noqa: FBT002
     return DEBUG_VALUE if debug else generate_random_user()


@@ -358,16 +358,16 @@ def set_celery_flower_password(file_path, value=None):


 def append_to_gitignore_file(ignored_line):
-    with open(".gitignore", "a") as gitignore_file:
+    with open(".gitignore", "a") as gitignore_file:  # noqa: PTH123
         gitignore_file.write(ignored_line)
         gitignore_file.write("\n")


-def set_flags_in_envs(postgres_user, celery_flower_user, debug=False):
-    local_django_envs_path = os.path.join(".envs", ".local", ".django")
-    production_django_envs_path = os.path.join(".envs", ".production", ".django")
-    local_postgres_envs_path = os.path.join(".envs", ".local", ".postgres")
-    production_postgres_envs_path = os.path.join(".envs", ".production", ".postgres")
+def set_flags_in_envs(postgres_user, celery_flower_user, debug=False):  # noqa: FBT002
+    local_django_envs_path = os.path.join(".envs", ".local", ".django")  # noqa: PTH118
+    production_django_envs_path = os.path.join(".envs", ".production", ".django")  # noqa: PTH118
+    local_postgres_envs_path = os.path.join(".envs", ".local", ".postgres")  # noqa: PTH118
+    production_postgres_envs_path = os.path.join(".envs", ".production", ".postgres")  # noqa: PTH118

     set_django_secret_key(production_django_envs_path)
     set_django_admin_url(production_django_envs_path)
@@ -384,38 +384,38 @@ def set_flags_in_envs(postgres_user, celery_flower_user, debug=False):


 def set_flags_in_settings_files():
-    set_django_secret_key(os.path.join("config", "settings", "local.py"))
-    set_django_secret_key(os.path.join("config", "settings", "test.py"))
+    set_django_secret_key(os.path.join("config", "settings", "local.py"))  # noqa: PTH118
+    set_django_secret_key(os.path.join("config", "settings", "test.py"))  # noqa: PTH118


 def remove_envs_and_associated_files():
     shutil.rmtree(".envs")
-    os.remove("merge_production_dotenvs_in_dotenv.py")
+    os.remove("merge_production_dotenvs_in_dotenv.py")  # noqa: PTH107
     shutil.rmtree("tests")


 def remove_celery_compose_dirs():
-    shutil.rmtree(os.path.join("compose", "local", "django", "celery"))
-    shutil.rmtree(os.path.join("compose", "production", "django", "celery"))
+    shutil.rmtree(os.path.join("compose", "local", "django", "celery"))  # noqa: PTH118
+    shutil.rmtree(os.path.join("compose", "production", "django", "celery"))  # noqa: PTH118


 def remove_node_dockerfile():
-    shutil.rmtree(os.path.join("compose", "local", "node"))
+    shutil.rmtree(os.path.join("compose", "local", "node"))  # noqa: PTH118


 def remove_aws_dockerfile():
-    shutil.rmtree(os.path.join("compose", "production", "aws"))
+    shutil.rmtree(os.path.join("compose", "production", "aws"))  # noqa: PTH118


 def remove_drf_starter_files():
-    os.remove(os.path.join("config", "api_router.py"))
-    shutil.rmtree(os.path.join("{{cookiecutter.project_slug}}", "users", "api"))
-    os.remove(os.path.join("{{cookiecutter.project_slug}}", "users", "tests", "test_drf_urls.py"))
-    os.remove(os.path.join("{{cookiecutter.project_slug}}", "users", "tests", "test_drf_views.py"))
-    os.remove(os.path.join("{{cookiecutter.project_slug}}", "users", "tests", "test_swagger.py"))
+    os.remove(os.path.join("config", "api_router.py"))  # noqa: PTH107, PTH118
+    shutil.rmtree(os.path.join("{{cookiecutter.project_slug}}", "users", "api"))  # noqa: PTH118
+    os.remove(os.path.join("{{cookiecutter.project_slug}}", "users", "tests", "test_drf_urls.py"))  # noqa: PTH107, PTH118
+    os.remove(os.path.join("{{cookiecutter.project_slug}}", "users", "tests", "test_drf_views.py"))  # noqa: PTH107, PTH118
+    os.remove(os.path.join("{{cookiecutter.project_slug}}", "users", "tests", "test_swagger.py"))  # noqa: PTH107, PTH118


-def main():
+def main():  # noqa: C901, PLR0912, PLR0915
     debug = "{{ cookiecutter.debug }}".lower() == "y"

     set_flags_in_envs(
@@ -425,15 +425,15 @@ def main():
     )
     set_flags_in_settings_files()

-    if "{{ cookiecutter.open_source_license }}" == "Not open source":
+    if "{{ cookiecutter.open_source_license }}" == "Not open source":  # noqa: PLR0133
         remove_open_source_files()
-    if "{{ cookiecutter.open_source_license}}" != "GPLv3":
+    if "{{ cookiecutter.open_source_license}}" != "GPLv3":  # noqa: PLR0133
         remove_gplv3_files()

-    if "{{ cookiecutter.username_type }}" == "username":
+    if "{{ cookiecutter.username_type }}" == "username":  # noqa: PLR0133
         remove_custom_user_manager_files()

-    if "{{ cookiecutter.editor }}" != "PyCharm":
+    if "{{ cookiecutter.editor }}" != "PyCharm":  # noqa: PLR0133
         remove_pycharm_files()

     if "{{ cookiecutter.use_docker }}".lower() == "y":
@@ -443,7 +443,7 @@ def main():
     else:
         remove_docker_files()

-    if "{{ cookiecutter.use_docker }}".lower() == "y" and "{{ cookiecutter.cloud_provider}}" != "AWS":
+    if "{{ cookiecutter.use_docker }}".lower() == "y" and "{{ cookiecutter.cloud_provider}}" != "AWS":  # noqa: PLR0133
         remove_aws_dockerfile()

     if "{{ cookiecutter.use_heroku }}".lower() == "n":
@@ -474,7 +474,7 @@ def main():
         use_async=("{{ cookiecutter.use_async }}".lower() == "y"),
     )

-    if "{{ cookiecutter.cloud_provider }}" == "None" and "{{ cookiecutter.use_docker }}".lower() == "n":
+    if "{{ cookiecutter.cloud_provider }}" == "None" and "{{ cookiecutter.use_docker }}".lower() == "n":  # noqa: PLR0133
         pass

     if "{{ cookiecutter.use_celery }}".lower() == "n":
@@ -482,16 +482,16 @@ def main():
         if "{{ cookiecutter.use_docker }}".lower() == "y":
             remove_celery_compose_dirs()

-    if "{{ cookiecutter.ci_tool }}" != "Travis":
+    if "{{ cookiecutter.ci_tool }}" != "Travis":  # noqa: PLR0133
         remove_dottravisyml_file()

-    if "{{ cookiecutter.ci_tool }}" != "Gitlab":
+    if "{{ cookiecutter.ci_tool }}" != "Gitlab":  # noqa: PLR0133
         remove_dotgitlabciyml_file()

-    if "{{ cookiecutter.ci_tool }}" != "Github":
+    if "{{ cookiecutter.ci_tool }}" != "Github":  # noqa: PLR0133
         remove_dotgithub_folder()

-    if "{{ cookiecutter.ci_tool }}" != "Drone":
+    if "{{ cookiecutter.ci_tool }}" != "Drone":  # noqa: PLR0133
         remove_dotdrone_file()

     if "{{ cookiecutter.use_drf }}".lower() == "n":

@@ -20,10 +20,10 @@ if hasattr(project_slug, "isidentifier"):

 assert project_slug == project_slug.lower(), f"'{project_slug}' project slug should be all lowercase"

-assert "\\" not in "{{ cookiecutter.author_name }}", "Don't include backslashes in author name."
+assert "\\" not in "{{ cookiecutter.author_name }}", "Don't include backslashes in author name."  # noqa: PLR0133

-if "{{ cookiecutter.use_whitenoise }}".lower() == "n" and "{{ cookiecutter.cloud_provider }}" == "None":
+if "{{ cookiecutter.use_whitenoise }}".lower() == "n" and "{{ cookiecutter.cloud_provider }}" == "None":  # noqa: PLR0133
     sys.exit(1)

-if "{{ cookiecutter.mail_service }}" == "Amazon SES" and "{{ cookiecutter.cloud_provider }}" != "AWS":
+if "{{ cookiecutter.mail_service }}" == "Amazon SES" and "{{ cookiecutter.cloud_provider }}" != "AWS":  # noqa: PLR0133
     sys.exit(1)

@@ -61,7 +61,7 @@ class DjVersion(NamedTuple):
 def get_package_info(package: str) -> dict:
     """Get package metadata using PyPI API."""
     # "django" converts to "Django" on redirect
-    r = requests.get(f"https://pypi.org/pypi/{package}/json", allow_redirects=True)
+    r = requests.get(f"https://pypi.org/pypi/{package}/json", allow_redirects=True)  # noqa: S113
     if not r.ok:
         sys.exit(1)
     return r.json()
@@ -210,9 +210,9 @@ class GitHubManager:
         for classifier in package_info["info"]["classifiers"]:
             # Usually in the form of "Framework :: Django :: 3.2"
             tokens = classifier.split(" ")
-            if len(tokens) >= 5 and tokens[2].lower() == "django" and "." in tokens[4]:
+            if len(tokens) >= 5 and tokens[2].lower() == "django" and "." in tokens[4]:  # noqa: PLR2004
                 version = DjVersion.parse(tokens[4])
-                if len(version) == 2:
+                if len(version) == 2:  # noqa: PLR2004
                     supported_dj_versions.append(version)

         if supported_dj_versions:

@@ -23,7 +23,7 @@ def main() -> None:
     Script entry point.
     """
     # Generate changelog for PRs merged yesterday
-    merged_date = dt.date.today() - dt.timedelta(days=1)
+    merged_date = dt.date.today() - dt.timedelta(days=1)  # noqa: DTZ011
     repo = Github(login_or_token=GITHUB_TOKEN).get_repo(GITHUB_REPO)
     merged_pulls = list(iter_pulls(repo, merged_date))
     if not merged_pulls:
@@ -48,7 +48,7 @@ def main() -> None:

     # Run uv lock
     uv_lock_path = ROOT / "uv.lock"
-    subprocess.run(["uv", "lock", "--no-upgrade"], cwd=ROOT, check=False)
+    subprocess.run(["uv", "lock", "--no-upgrade"], cwd=ROOT, check=False)  # noqa: S603, S607

     # Commit changes, create tag and push
     update_git_repo([changelog_path, setup_py_path, uv_lock_path], release)

@@ -1,4 +1,4 @@
-import glob
+import glob  # noqa: EXE002
 import os
 import re
 import sys
@@ -148,7 +148,7 @@ def _fixture_id(ctx):

 def build_files_list(base_dir):
     """Build a list containing absolute paths to the generated files."""
-    return [os.path.join(dirpath, file_path) for dirpath, subdirs, files in os.walk(base_dir) for file_path in files]
+    return [os.path.join(dirpath, file_path) for dirpath, subdirs, files in os.walk(base_dir) for file_path in files]  # noqa: PTH118


 def check_paths(paths):
@@ -158,7 +158,7 @@ def check_paths(paths):
         if is_binary(path):
             continue

-        for line in open(path):
+        for line in open(path):  # noqa: SIM115, PTH123
             match = RE_OBJ.search(line)
             assert match is None, f"cookiecutter variable not replaced in {path}"

@@ -225,7 +225,7 @@ def test_django_upgrade_passes(cookies, context_override):

     python_files = [
         file_path.removeprefix(f"{result.project_path}/")
-        for file_path in glob.glob(str(result.project_path / "**" / "*.py"), recursive=True)
+        for file_path in glob.glob(str(result.project_path / "**" / "*.py"), recursive=True)  # noqa: PTH207
     ]
     try:
         sh.django_upgrade(
@@ -286,7 +286,7 @@ def test_travis_invokes_pytest(cookies, context, use_docker, expected_test_scrip
     assert result.project_path.name == context["project_slug"]
     assert result.project_path.is_dir()

-    with open(f"{result.project_path}/.travis.yml") as travis_yml:
+    with open(f"{result.project_path}/.travis.yml") as travis_yml:  # noqa: PTH123
         try:
             yml = yaml.safe_load(travis_yml)["jobs"]["include"]
             assert yml[0]["script"] == ["ruff check ."]
@@ -311,7 +311,7 @@ def test_gitlab_invokes_precommit_and_pytest(cookies, context, use_docker, expec
     assert result.project_path.name == context["project_slug"]
     assert result.project_path.is_dir()

-    with open(f"{result.project_path}/.gitlab-ci.yml") as gitlab_yml:
+    with open(f"{result.project_path}/.gitlab-ci.yml") as gitlab_yml:  # noqa: PTH123
         try:
             gitlab_config = yaml.safe_load(gitlab_yml)
             assert gitlab_config["precommit"]["script"] == [
@@ -338,7 +338,7 @@ def test_github_invokes_linter_and_pytest(cookies, context, use_docker, expected
     assert result.project_path.name == context["project_slug"]
     assert result.project_path.is_dir()

-    with open(f"{result.project_path}/.github/workflows/ci.yml") as github_yml:
+    with open(f"{result.project_path}/.github/workflows/ci.yml") as github_yml:  # noqa: PTH123
         try:
             github_config = yaml.safe_load(github_yml)
             linter_present = False
@@ -389,7 +389,7 @@ def test_pycharm_docs_removed(cookies, context, editor, pycharm_docs_exist):
     context.update({"editor": editor})
     result = cookies.bake(extra_context=context)

-    with open(f"{result.project_path}/docs/index.rst") as f:
+    with open(f"{result.project_path}/docs/index.rst") as f:  # noqa: PTH123
         has_pycharm_docs = "pycharm/configuration" in f.read()
     assert has_pycharm_docs is pycharm_docs_exist
