diff --git a/.github/workflows/stale.yml b/.github/stale.yml
similarity index 100%
rename from .github/workflows/stale.yml
rename to .github/stale.yml
diff --git a/.github/workflows/coveralls.yml b/.github/workflows/coveralls.yml
new file mode 100644
index 00000000..a8e2875c
--- /dev/null
+++ b/.github/workflows/coveralls.yml
@@ -0,0 +1,25 @@
+name: 📊 Check Coverage
+on:
+  push:
+    branches:
+      - master
+      - '*.x'
+    paths-ignore:
+      - 'docs/**'
+      - '*.md'
+      - '*.rst'
+  pull_request:
+    branches:
+      - master
+      - '*.x'
+    paths-ignore:
+      - 'docs/**'
+      - '*.md'
+      - '*.rst'
+jobs:
+  coveralls_finish:
+    # check coverage increase/decrease
+    runs-on: ubuntu-latest
+    steps:
+      - name: Coveralls Finished
+        uses: AndreMiras/coveralls-python-action@develop
diff --git a/.github/workflows/deploy.yml b/.github/workflows/deploy.yml
new file mode 100644
index 00000000..2a6cdc6b
--- /dev/null
+++ b/.github/workflows/deploy.yml
@@ -0,0 +1,26 @@
+name: 🚀 Deploy to PyPI
+
+on:
+  push:
+    tags:
+      - 'v*'
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python 3.8
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+      - name: Build wheel and source tarball
+        run: |
+          pip install wheel
+          python setup.py sdist bdist_wheel
+      - name: Publish a Python distribution to PyPI
+        uses: pypa/gh-action-pypi-publish@v1.1.0
+        with:
+          user: __token__
+          password: ${{ secrets.pypi_password }}
diff --git a/.github/workflows/lint.yml b/.github/workflows/lint.yml
new file mode 100644
index 00000000..95251d9b
--- /dev/null
+++ b/.github/workflows/lint.yml
@@ -0,0 +1,26 @@
+name: 💅 Lint
+
+on: [push, pull_request]
+
+jobs:
+  build:
+    runs-on: ubuntu-latest
+
+    steps:
+      - uses: actions/checkout@v2
+      - name: Set up Python 3.8
+        uses: actions/setup-python@v2
+        with:
+          python-version: 3.8
+      - name: Install dependencies
+        run: |
+          python -m pip install --upgrade pip
+          pip install tox
+      - name: Run lint
+        run: tox
+        env:
+          TOXENV: pre-commit
+      - name: Run mypy
+        run: tox
+        env:
+          TOXENV: mypy
diff --git a/.github/workflows/tests.yml b/.github/workflows/tests.yml
index 6de43f37..cdc4d01e 100644
--- a/.github/workflows/tests.yml
+++ b/.github/workflows/tests.yml
@@ -52,32 +52,3 @@ jobs:
 
       - run: pip install tox
       - run: tox -e ${{ matrix.tox }}
-
-  coveralls_finish:
-    # check coverage increase/decrease
-    needs: tests
-    runs-on: ubuntu-latest
-    steps:
-      - name: Coveralls Finished
-        uses: AndreMiras/coveralls-python-action@develop
-
-  deploy:
-    # builds and publishes to PyPi
-    runs-on: ubuntu-latest
-    steps:
-      - uses: actions/checkout@v2
-      - name: Set up Python
-        uses: actions/setup-python@v2
-        with:
-          python-version: '3.7'
-      - name: Install dependencies
-        run: |
-          python -m pip install --upgrade pip
-          pip install build
-      - name: Build package
-        run: python -m build
-      - name: Publish package
-        uses: pypa/gh-action-pypi-publish@release/v1
-        with:
-          user: __token__
-          password: ${{ secrets.PYPI_API_TOKEN }}
diff --git a/.pre-commit-config.yml b/.pre-commit-config.yaml
similarity index 63%
rename from .pre-commit-config.yml
rename to .pre-commit-config.yaml
index c9ffc21e..bd6a7340 100644
--- a/.pre-commit-config.yml
+++ b/.pre-commit-config.yaml
@@ -1,6 +1,9 @@
+default_language_version:
+  python: python3.8
+
 repos:
 - repo: git://github.com/pre-commit/pre-commit-hooks
-  rev: v2.1.0
+  rev: v2.3.0
   hooks:
   - id: check-merge-conflict
   - id: check-json
@@ -13,16 +16,15 @@ repos:
     - --autofix
   - id: trailing-whitespace
     exclude: README.md
-- repo: https://github.com/asottile/pyupgrade
-  rev: v1.12.0
+- repo: git://github.com/asottile/pyupgrade
+  rev: v2.24.0
   hooks:
   - id: pyupgrade
-- repo: https://github.com/ambv/black
-  rev: 19.10b0
+- repo: git://github.com/ambv/black
+  rev: 19.3b0
   hooks:
-  - id: black
-    language_version: python3
-- repo: https://github.com/PyCQA/flake8
-  rev: 3.7.8
+  - id: black
+- repo: git://github.com/PyCQA/flake8
+  rev: 3.8.4
   hooks:
   - id: flake8
diff --git a/graphene/pyutils/dataclasses.py b/graphene/pyutils/dataclasses.py
index 19530eff..1a474526 100644
--- a/graphene/pyutils/dataclasses.py
+++ b/graphene/pyutils/dataclasses.py
@@ -291,14 +291,7 @@ class Field:
 
 
 class _DataclassParams:
-    __slots__ = (
-        "init",
-        "repr",
-        "eq",
-        "order",
-        "unsafe_hash",
-        "frozen",
-    )
+    __slots__ = ("init", "repr", "eq", "order", "unsafe_hash", "frozen")
 
     def __init__(self, init, repr, eq, order, unsafe_hash, frozen):
         self.init = init
@@ -442,13 +435,11 @@ def _field_init(f, frozen, globals, self_name):
         # This field does not need initialization. Signify that
         # to the caller by returning None.
         return None
-
     # Only test this now, so that we can create variables for the
     # default. However, return None to signify that we're not going
     # to actually do the assignment statement for InitVars.
     if f._field_type == _FIELD_INITVAR:
         return None
-
     # Now, actually generate the field assignment.
     return _field_assign(frozen, f.name, value, self_name)
@@ -490,7 +481,6 @@ def _init_fn(fields, frozen, has_post_init, self_name):
                 raise TypeError(
                     f"non-default argument {f.name!r} " "follows default argument"
                 )
-
     globals = {"MISSING": MISSING, "_HAS_DEFAULT_FACTORY": _HAS_DEFAULT_FACTORY}
 
     body_lines = []
@@ -500,16 +490,13 @@ def _init_fn(fields, frozen, has_post_init, self_name):
         # initialization (it's a pseudo-field). Just skip it.
         if line:
             body_lines.append(line)
-
     # Does this class have a post-init function?
     if has_post_init:
         params_str = ",".join(f.name for f in fields if f._field_type is _FIELD_INITVAR)
         body_lines.append(f"{self_name}.{_POST_INIT_NAME}({params_str})")
-
     # If no body lines, use 'pass'.
     if not body_lines:
         body_lines = ["pass"]
-
     locals = {f"_type_{f.name}": f.type for f in fields}
     return _create_fn(
         "__init__",
@@ -674,7 +661,6 @@ def _get_field(cls, a_name, a_type):
         # This is a field in __slots__, so it has no default value.
         default = MISSING
     f = field(default=default)
-
     # Only at this point do we know the name and the type. Set them.
     f.name = a_name
     f.type = a_type
@@ -705,7 +691,6 @@ def _get_field(cls, a_name, a_type):
         and _is_type(f.type, cls, typing, typing.ClassVar, _is_classvar)
     ):
         f._field_type = _FIELD_CLASSVAR
-
     # If the type is InitVar, or if it's a matching string annotation,
     # then it's an InitVar.
     if f._field_type is _FIELD:
@@ -717,7 +702,6 @@ def _get_field(cls, a_name, a_type):
         and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, _is_initvar)
     ):
         f._field_type = _FIELD_INITVAR
-
     # Validations for individual fields. This is delayed until now,
     # instead of in the Field() constructor, since only here do we
     # know the field name, which allows for better error reporting.
@@ -731,14 +715,12 @@ def _get_field(cls, a_name, a_type):
         # example, how about init=False (or really,
         # init=<not-the-default-init-value>)? It makes no sense for
         # ClassVar and InitVar to specify init=<anything>.
-
     # For real fields, disallow mutable defaults for known types.
     if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)):
         raise ValueError(
             f"mutable default {type(f.default)} for field "
             f"{f.name} is not allowed: use default_factory"
         )
-
     return f
 
 
@@ -827,7 +809,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
                 fields[f.name] = f
             if getattr(b, _PARAMS).frozen:
                 any_frozen_base = True
-
     # Annotations that are defined in this class (not in base
     # classes). If __annotations__ isn't present, then this class
     # adds no new annotations. We use this to compute fields that are
@@ -866,22 +847,18 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
                 delattr(cls, f.name)
             else:
                 setattr(cls, f.name, f.default)
-
     # Do we have any Field members that don't also have annotations?
     for name, value in cls.__dict__.items():
         if isinstance(value, Field) and not name in cls_annotations:
             raise TypeError(f"{name!r} is a field but has no type annotation")
-
     # Check rules that apply if we are derived from any dataclasses.
     if has_dataclass_bases:
         # Raise an exception if any of our bases are frozen, but we're not.
         if any_frozen_base and not frozen:
             raise TypeError("cannot inherit non-frozen dataclass from a " "frozen one")
-
         # Raise an exception if we're frozen, but none of our bases are.
         if not any_frozen_base and frozen:
             raise TypeError("cannot inherit frozen dataclass from a " "non-frozen one")
-
     # Remember all of the fields on our class (including bases). This
     # also marks this class as being a dataclass.
     setattr(cls, _FIELDS, fields)
@@ -900,7 +877,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
     #  eq methods.
     if order and not eq:
         raise ValueError("eq must be true if order is true")
-
     if init:
         # Does this class have a post-init function?
         has_post_init = hasattr(cls, _POST_INIT_NAME)
@@ -920,7 +896,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
                 "__dataclass_self__" if "self" in fields else "self",
             ),
         )
-
     # Get the fields as a list, and include only real fields. This is
     # used in all of the following methods.
     field_list = [f for f in fields.values() if f._field_type is _FIELD]
@@ -928,7 +903,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
     if repr:
         flds = [f for f in field_list if f.repr]
         _set_new_attribute(cls, "__repr__", _repr_fn(flds))
-
     if eq:
         # Create _eq__ method. There's no need for a __ne__ method,
         # since python will call __eq__ and negate it.
@@ -938,7 +912,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
         _set_new_attribute(
             cls, "__eq__", _cmp_fn("__eq__", "==", self_tuple, other_tuple)
         )
-
     if order:
         # Create and set the ordering methods.
         flds = [f for f in field_list if f.compare]
@@ -958,7 +931,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
                     f"in class {cls.__name__}. Consider using "
                     "functools.total_ordering"
                 )
-
     if frozen:
         for fn in _frozen_get_del_attr(cls, field_list):
             if _set_new_attribute(cls, fn.__name__, fn):
@@ -966,7 +938,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
                     f"Cannot overwrite attribute {fn.__name__} "
                     f"in class {cls.__name__}"
                 )
-
     # Decide if/how we're going to create a hash function.
     hash_action = _hash_action[
         bool(unsafe_hash), bool(eq), bool(frozen), has_explicit_hash
@@ -975,11 +946,9 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
         # No need to call _set_new_attribute here, since by the time
         # we're here the overwriting is unconditional.
         cls.__hash__ = hash_action(cls, field_list)
-
     if not getattr(cls, "__doc__"):
         # Create a class doc-string.
         cls.__doc__ = cls.__name__ + str(inspect.signature(cls)).replace(" -> None", "")
-
     return cls
@@ -1015,7 +984,6 @@ def dataclass(
     if _cls is None:
         # We're called with parens.
         return wrap
-
     # We're called as @dataclass without parens.
     return wrap(_cls)
@@ -1032,7 +1000,6 @@ def fields(class_or_instance):
         fields = getattr(class_or_instance, _FIELDS)
     except AttributeError:
         raise TypeError("must be called with a dataclass type or instance")
-
     # Exclude pseudo-fields. Note that fields is sorted by insertion
     # order, so the order of the tuple is as the fields were defined.
     return tuple(f for f in fields.values() if f._field_type is _FIELD)
@@ -1174,7 +1141,6 @@ def make_dataclass(
     else:
         # Copy namespace since we're going to mutate it.
         namespace = namespace.copy()
-
     # While we're looking through the field names, validate that they
     # are identifiers, are not keywords, and not duplicates.
     seen = set()
@@ -1184,23 +1150,20 @@ def make_dataclass(
             name = item
             tp = "typing.Any"
         elif len(item) == 2:
-            name, tp, = item
+            (name, tp) = item
         elif len(item) == 3:
             name, tp, spec = item
             namespace[name] = spec
         else:
             raise TypeError(f"Invalid field: {item!r}")
-
         if not isinstance(name, str) or not name.isidentifier():
             raise TypeError(f"Field names must be valid identifers: {name!r}")
         if keyword.iskeyword(name):
             raise TypeError(f"Field names must not be keywords: {name!r}")
         if name in seen:
             raise TypeError(f"Field name duplicated: {name!r}")
-
         seen.add(name)
         anns[name] = tp
-
     namespace["__annotations__"] = anns
     # We use `types.new_class()` instead of simply `type()` to allow dynamic creation
     # of generic dataclassses.
@@ -1229,14 +1192,13 @@ def replace(obj, **changes):
       c = C(1, 2)
       c1 = replace(c, x=3)
       assert c1.x == 3 and c1.y == 2
-      """
+    """
 
     # We're going to mutate 'changes', but that's okay because it's a
     # new dict, even if called with 'replace(obj, **my_changes)'.
     if not _is_dataclass_instance(obj):
         raise TypeError("replace() should be called on dataclass instances")
-
     # It's an error to have init=False fields in 'changes'.
     # If a field is not in 'changes', read its value from the provided obj.
@@ -1250,10 +1212,8 @@ def replace(obj, **changes):
                 "replace()"
             )
             continue
-
         if f.name not in changes:
             changes[f.name] = getattr(obj, f.name)
-
     # Create the new object, which calls __init__() and
     # __post_init__() (if defined), using all of the init fields we've
     # added and/or left in 'changes'. If there are values supplied in
diff --git a/graphene/relay/node.py b/graphene/relay/node.py
index b189bc97..8defefff 100644
--- a/graphene/relay/node.py
+++ b/graphene/relay/node.py
@@ -92,11 +92,9 @@ class Node(AbstractNode):
             _type, _id = cls.from_global_id(global_id)
         except Exception as e:
             raise Exception(
-                (
-                    f'Unable to parse global ID "{global_id}". '
-                    'Make sure it is a base64 encoded string in the format: "TypeName:id". '
-                    f"Exception message: {str(e)}"
-                )
+                f'Unable to parse global ID "{global_id}". '
+                'Make sure it is a base64 encoded string in the format: "TypeName:id". '
' + f"Exception message: {str(e)}" ) graphene_type = info.schema.get_type(_type) diff --git a/graphene/relay/tests/test_connection_async.py b/graphene/relay/tests/test_connection_async.py index b139f6a3..ae228cf9 100644 --- a/graphene/relay/tests/test_connection_async.py +++ b/graphene/relay/tests/test_connection_async.py @@ -51,10 +51,10 @@ letters = {letter: Letter(id=i, letter=letter) for i, letter in enumerate(letter def edges(selected_letters): return [ { - "node": {"id": base64("Letter:%s" % l.id), "letter": l.letter}, - "cursor": base64("arrayconnection:%s" % l.id), + "node": {"id": base64("Letter:%s" % letter.id), "letter": letter.letter}, + "cursor": base64("arrayconnection:%s" % letter.id), } - for l in [letters[i] for i in selected_letters] + for letter in [letters[i] for i in selected_letters] ] diff --git a/graphene/relay/tests/test_connection_query.py b/graphene/relay/tests/test_connection_query.py index cac4b65b..b697c462 100644 --- a/graphene/relay/tests/test_connection_query.py +++ b/graphene/relay/tests/test_connection_query.py @@ -51,10 +51,10 @@ letters = {letter: Letter(id=i, letter=letter) for i, letter in enumerate(letter def edges(selected_letters): return [ { - "node": {"id": base64("Letter:%s" % l.id), "letter": l.letter}, - "cursor": base64("arrayconnection:%s" % l.id), + "node": {"id": base64("Letter:%s" % letter.id), "letter": letter.letter}, + "cursor": base64("arrayconnection:%s" % letter.id), } - for l in [letters[i] for i in selected_letters] + for letter in [letters[i] for i in selected_letters] ] @@ -66,7 +66,6 @@ def cursor_for(ltr): async def execute(args=""): if args: args = "(" + args + ")" - return await schema.execute_async( """ { @@ -164,14 +163,14 @@ async def test_respects_first_and_after_and_before_too_few(): @mark.asyncio async def test_respects_first_and_after_and_before_too_many(): await check( - f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", + f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD" ) @mark.asyncio async def test_respects_first_and_after_and_before_exactly_right(): await check( - f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", + f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD" ) @@ -187,14 +186,14 @@ async def test_respects_last_and_after_and_before_too_few(): @mark.asyncio async def test_respects_last_and_after_and_before_too_many(): await check( - f'last: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", + f'last: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD" ) @mark.asyncio async def test_respects_last_and_after_and_before_exactly_right(): await check( - f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", + f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD" ) diff --git a/graphene/types/mutation.py b/graphene/types/mutation.py index 6e041bbf..ca87775a 100644 --- a/graphene/types/mutation.py +++ b/graphene/types/mutation.py @@ -76,7 +76,6 @@ class Mutation(ObjectType): ): if not _meta: _meta = MutationOptions(cls) - output = output or getattr(cls, "Output", None) fields = {} @@ -85,43 +84,35 @@ class Mutation(ObjectType): interface, Interface ), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".' 
             fields.update(interface._meta.fields)
-
         if not output:
             # If output is defined, we don't need to get the fields
             fields = {}
             for base in reversed(cls.__mro__):
                 fields.update(yank_fields_from_attrs(base.__dict__, _as=Field))
             output = cls
-
         if not arguments:
             input_class = getattr(cls, "Arguments", None)
             if not input_class:
                 input_class = getattr(cls, "Input", None)
                 if input_class:
                     warn_deprecation(
-                        (
-                            f"Please use {cls.__name__}.Arguments instead of {cls.__name__}.Input."
-                            " Input is now only used in ClientMutationID.\n"
-                            "Read more:"
-                            " https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input"
-                        )
+                        f"Please use {cls.__name__}.Arguments instead of {cls.__name__}.Input."
+                        " Input is now only used in ClientMutationID.\n"
+                        "Read more:"
+                        " https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input"
                     )
-
             if input_class:
                 arguments = props(input_class)
             else:
                 arguments = {}
-
         if not resolver:
             mutate = getattr(cls, "mutate", None)
             assert mutate, "All mutations must define a mutate method in it"
             resolver = get_unbound_function(mutate)
-
         if _meta.fields:
             _meta.fields.update(fields)
         else:
             _meta.fields = fields
-
         _meta.interfaces = interfaces
         _meta.output = output
         _meta.resolver = resolver
@@ -133,7 +124,7 @@ class Mutation(ObjectType):
     def Field(
         cls, name=None, description=None, deprecation_reason=None, required=False
     ):
-        """ Mount instance of mutation Field. """
+        """Mount instance of mutation Field."""
         return Field(
             cls._meta.output,
             args=cls._meta.arguments,
diff --git a/graphene/types/objecttype.py b/graphene/types/objecttype.py
index c69be937..1ff29a2e 100644
--- a/graphene/types/objecttype.py
+++ b/graphene/types/objecttype.py
@@ -7,7 +7,6 @@ try:
     from dataclasses import make_dataclass, field
 except ImportError:
     from ..pyutils.dataclasses import make_dataclass, field  # type: ignore
-
 # For static type checking with Mypy
 MYPY = False
 if MYPY:
@@ -28,7 +27,7 @@ class ObjectTypeMeta(BaseTypeMeta):
             pass
 
         base_cls = super().__new__(
-            cls, name_, (InterObjectType,) + bases, namespace, **options,
+            cls, name_, (InterObjectType,) + bases, namespace, **options
         )
         if base_cls._meta:
             fields = [
@@ -133,7 +132,6 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta):
     ):
         if not _meta:
             _meta = ObjectTypeOptions(cls)
-
         fields = {}
 
         for interface in interfaces:
@@ -141,10 +139,8 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta):
                 interface, Interface
             ), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".'
             fields.update(interface._meta.fields)
-
         for base in reversed(cls.__mro__):
             fields.update(yank_fields_from_attrs(base.__dict__, _as=Field))
-
         assert not (possible_types and cls.is_type_of), (
             f"{cls.__name__}.Meta.possible_types will cause type collision with {cls.__name__}.is_type_of. "
             "Please use one or other."
@@ -154,7 +150,6 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta):
             _meta.fields.update(fields)
         else:
             _meta.fields = fields
-
         if not _meta.interfaces:
             _meta.interfaces = interfaces
         _meta.possible_types = possible_types
diff --git a/graphene/types/schema.py b/graphene/types/schema.py
index 1ff0bff4..0c6d4183 100644
--- a/graphene/types/schema.py
+++ b/graphene/types/schema.py
@@ -27,8 +27,6 @@ from graphql import (
     GraphQLSchema,
     GraphQLString,
 )
-from graphql.execution import ExecutionContext
-from graphql.execution.values import get_argument_values
 
 from ..utils.str_converters import to_camel_case
 from ..utils.get_unbound_function import get_unbound_function
diff --git a/graphene/types/tests/test_base64.py b/graphene/types/tests/test_base64.py
index b096dcbc..433f63c3 100644
--- a/graphene/types/tests/test_base64.py
+++ b/graphene/types/tests/test_base64.py
@@ -64,15 +64,11 @@ def test_base64_query_none():
 
 
 def test_base64_query_invalid():
-    bad_inputs = [
-        dict(),
-        123,
-        "This is not valid base64",
-    ]
+    bad_inputs = [dict(), 123, "This is not valid base64"]
 
     for input_ in bad_inputs:
         result = schema.execute(
-            """{ base64(input: $input) }""", variables={"input": input_},
+            """{ base64(input: $input) }""", variables={"input": input_}
         )
         assert isinstance(result.errors, list)
         assert len(result.errors) == 1
diff --git a/graphene/types/tests/test_enum.py b/graphene/types/tests/test_enum.py
index 8d5e87af..6e204aa9 100644
--- a/graphene/types/tests/test_enum.py
+++ b/graphene/types/tests/test_enum.py
@@ -26,8 +26,8 @@ def test_enum_construction():
     assert RGB._meta.description == "Description"
 
     values = RGB._meta.enum.__members__.values()
-    assert sorted([v.name for v in values]) == ["BLUE", "GREEN", "RED"]
-    assert sorted([v.description for v in values]) == [
+    assert sorted(v.name for v in values) == ["BLUE", "GREEN", "RED"]
+    assert sorted(v.description for v in values) == [
         "Description BLUE",
         "Description GREEN",
         "Description RED",
@@ -52,7 +52,7 @@ def test_enum_instance_construction():
     RGB = Enum("RGB", "RED,GREEN,BLUE")
 
     values = RGB._meta.enum.__members__.values()
-    assert sorted([v.name for v in values]) == ["BLUE", "GREEN", "RED"]
+    assert sorted(v.name for v in values) == ["BLUE", "GREEN", "RED"]
 
 
 def test_enum_from_builtin_enum():
@@ -465,7 +465,7 @@ def test_mutation_enum_input_type():
             color
           }
         }
-        """,
+        """
     )
     assert not result.errors
     assert result.data == {"createPaint": {"color": "RED"}}
diff --git a/graphene/types/tests/test_schema.py b/graphene/types/tests/test_schema.py
index 9cdbde3b..fe4739c9 100644
--- a/graphene/types/tests/test_schema.py
+++ b/graphene/types/tests/test_schema.py
@@ -1,6 +1,5 @@
 from graphql.type import GraphQLObjectType, GraphQLSchema
-from graphql import GraphQLError
-from pytest import mark, raises, fixture
+from pytest import raises
 
 from graphene.tests.utils import dedent
diff --git a/graphene/utils/orderedtype.py b/graphene/utils/orderedtype.py
index fb8783d2..294ad54e 100644
--- a/graphene/utils/orderedtype.py
+++ b/graphene/utils/orderedtype.py
@@ -36,4 +36,4 @@ class OrderedType:
         return NotImplemented
 
     def __hash__(self):
-        return hash((self.creation_counter))
+        return hash(self.creation_counter)
diff --git a/graphene/validation/__init__.py b/graphene/validation/__init__.py
index f338e2d0..5b592a2c 100644
--- a/graphene/validation/__init__.py
+++ b/graphene/validation/__init__.py
@@ -2,7 +2,4 @@ from .depth_limit import depth_limit_validator
 from .disable_introspection import DisableIntrospection
 
-__all__ = [
-    "DisableIntrospection",
"depth_limit_validator" -] +__all__ = ["DisableIntrospection", "depth_limit_validator"] diff --git a/graphene/validation/depth_limit.py b/graphene/validation/depth_limit.py index c72b78d0..5be852c7 100644 --- a/graphene/validation/depth_limit.py +++ b/graphene/validation/depth_limit.py @@ -30,7 +30,6 @@ try: except ImportError: # backwards compatibility for v3.6 from typing import Pattern - from typing import Callable, Dict, List, Optional, Union from graphql import GraphQLError @@ -75,7 +74,6 @@ def depth_limit_validator( operation_name=name, ignore=ignore, ) - if callable(callback): callback(query_depths) super().__init__(validation_context) @@ -90,7 +88,6 @@ def get_fragments( for definition in definitions: if isinstance(definition, FragmentDefinitionNode): fragments[definition.name.value] = definition - return fragments @@ -105,7 +102,6 @@ def get_queries_and_mutations( if isinstance(definition, OperationDefinitionNode): operation = definition.name.value if definition.name else "anonymous" operations[operation] = definition - return operations @@ -126,7 +122,6 @@ def determine_depth( ) ) return depth_so_far - if isinstance(node, FieldNode): should_ignore = is_introspection_key(node.name.value) or is_ignored( node, ignore @@ -134,7 +129,6 @@ def determine_depth( if should_ignore or not node.selection_set: return 0 - return 1 + max( map( lambda selection: determine_depth( @@ -177,13 +171,14 @@ def determine_depth( ) ) else: - raise Exception(f"Depth crawler cannot handle: {node.kind}.") # pragma: no cover + raise Exception( + f"Depth crawler cannot handle: {node.kind}." + ) # pragma: no cover def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bool: if ignore is None: return False - for rule in ignore: field_name = node.name.value if isinstance(rule, str): @@ -197,5 +192,4 @@ def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bo return True else: raise ValueError(f"Invalid ignore option: {rule}.") - return False diff --git a/graphene/validation/disable_introspection.py b/graphene/validation/disable_introspection.py index be25a287..49a7d607 100644 --- a/graphene/validation/disable_introspection.py +++ b/graphene/validation/disable_introspection.py @@ -11,7 +11,6 @@ class DisableIntrospection(ValidationRule): if is_introspection_key(field_name): self.report_error( GraphQLError( - f"Cannot query '{field_name}': introspection is disabled.", - node, + f"Cannot query '{field_name}': introspection is disabled.", node ) ) diff --git a/graphene/validation/tests/test_depth_limit_validator.py b/graphene/validation/tests/test_depth_limit_validator.py index 499adbcc..29c1508c 100644 --- a/graphene/validation/tests/test_depth_limit_validator.py +++ b/graphene/validation/tests/test_depth_limit_validator.py @@ -48,26 +48,11 @@ class HumanType(ObjectType): class Query(ObjectType): - user = Field( - HumanType, - required=True, - name=String() - ) - version = String( - required=True - ) - user1 = Field( - HumanType, - required=True - ) - user2 = Field( - HumanType, - required=True - ) - user3 = Field( - HumanType, - required=True - ) + user = Field(HumanType, required=True, name=String()) + version = String(required=True) + user1 = Field(HumanType, required=True) + user2 = Field(HumanType, required=True) + user3 = Field(HumanType, required=True) @staticmethod def resolve_user(root, info, name=None): @@ -91,9 +76,7 @@ def run_query(query: str, max_depth: int, ignore=None): document_ast=document, rules=( depth_limit_validator( - max_depth=max_depth, - 
-                ignore=ignore,
-                callback=callback
+                max_depth=max_depth, ignore=ignore, callback=callback
             ),
         ),
     )
@@ -253,11 +236,7 @@ def test_should_ignore_field():
     errors, result = run_query(
         query,
         10,
-        ignore=[
-            "user1",
-            re.compile("user2"),
-            lambda field_name: field_name == "user3",
-        ],
+        ignore=["user1", re.compile("user2"), lambda field_name: field_name == "user3"],
     )
 
     expected = {"read1": 2, "read2": 0}
@@ -272,8 +251,4 @@ def test_should_raise_invalid_ignore():
     }
     """
     with raises(ValueError, match="Invalid ignore option:"):
-        run_query(
-            query,
-            10,
-            ignore=[True],
-        )
+        run_query(query, 10, ignore=[True])
diff --git a/graphene/validation/tests/test_disable_introspection.py b/graphene/validation/tests/test_disable_introspection.py
index 06019900..958a1afa 100644
--- a/graphene/validation/tests/test_disable_introspection.py
+++ b/graphene/validation/tests/test_disable_introspection.py
@@ -5,9 +5,7 @@ from ..disable_introspection import DisableIntrospection
 
 
 class Query(ObjectType):
-    name = String(
-        required=True
-    )
+    name = String(required=True)
 
     @staticmethod
     def resolve_name(root, info):
@@ -23,9 +21,7 @@ def run_query(query: str):
     errors = validate(
         schema=schema.graphql_schema,
         document_ast=document,
-        rules=(
-            DisableIntrospection,
-        ),
+        rules=(DisableIntrospection,),
     )
     return errors
diff --git a/tox.ini b/tox.ini
index dd922c46..c4bf6ad0 100644
--- a/tox.ini
+++ b/tox.ini
@@ -11,25 +11,25 @@ commands =
     py{36,37,38}: pytest --cov=graphene graphene examples {posargs}
 
 [testenv:pre-commit]
-basepython=python3.7
+basepython=python3.8
 deps =
     pre-commit>=2,<3
 setenv =
     LC_CTYPE=en_US.UTF-8
 commands =
-    pre-commit {posargs:run --all-files}
+    pre-commit run --all-files --show-diff-on-failure
 
 [testenv:mypy]
-basepython=python3.7
+basepython=python3.8
 deps =
     mypy>=0.761,<1
 commands =
     mypy graphene
 
 [testenv:flake8]
-basepython=python3.7
+basepython=python3.8
 deps =
-    flake8>=3.7,<4
+    flake8>=3.8,<4
 commands =
     pip install --pre -e .
     flake8 graphene