Merge pull request #1359 from codebyaryan/fix-actions

Fix actions
This commit is contained in:
Syrus Akbary 2021-08-23 22:07:12 -05:00 committed by GitHub
commit f039af2810
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
23 changed files with 138 additions and 191 deletions

25
.github/workflows/coveralls.yml vendored Normal file
View File

@@ -0,0 +1,25 @@
name: 📊 Check Coverage
on:
push:
branches:
- master
- '*.x'
paths-ignore:
- 'docs/**'
- '*.md'
- '*.rst'
pull_request:
branches:
- master
- '*.x'
paths-ignore:
- 'docs/**'
- '*.md'
- '*.rst'
jobs:
coveralls_finish:
# check coverage increase/decrease
runs-on: ubuntu-latest
steps:
- name: Coveralls Finished
uses: AndreMiras/coveralls-python-action@develop

26
.github/workflows/deploy.yml vendored Normal file
View File

@@ -0,0 +1,26 @@
name: 🚀 Deploy to PyPI
on:
push:
tags:
- 'v*'
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Build wheel and source tarball
run: |
pip install wheel
python setup.py sdist bdist_wheel
- name: Publish a Python distribution to PyPI
uses: pypa/gh-action-pypi-publish@v1.1.0
with:
user: __token__
password: ${{ secrets.pypi_password }}

26
.github/workflows/lint.yml vendored Normal file
View File

@@ -0,0 +1,26 @@
name: 💅 Lint
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python 3.8
uses: actions/setup-python@v2
with:
python-version: 3.8
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install tox
- name: Run lint
run: tox
env:
TOXENV: pre-commit
- name: Run mypy
run: tox
env:
TOXENV: mypy

View File

@@ -52,32 +52,3 @@ jobs:
- run: pip install tox - run: pip install tox
- run: tox -e ${{ matrix.tox }} - run: tox -e ${{ matrix.tox }}
coveralls_finish:
# check coverage increase/decrease
needs: tests
runs-on: ubuntu-latest
steps:
- name: Coveralls Finished
uses: AndreMiras/coveralls-python-action@develop
deploy:
# builds and publishes to PyPi
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v2
- name: Set up Python
uses: actions/setup-python@v2
with:
python-version: '3.7'
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build
- name: Build package
run: python -m build
- name: Publish package
uses: pypa/gh-action-pypi-publish@release/v1
with:
user: __token__
password: ${{ secrets.PYPI_API_TOKEN }}

View File

@@ -1,6 +1,9 @@
default_language_version:
python: python3.8
repos: repos:
- repo: git://github.com/pre-commit/pre-commit-hooks - repo: git://github.com/pre-commit/pre-commit-hooks
rev: v2.1.0 rev: v2.3.0
hooks: hooks:
- id: check-merge-conflict - id: check-merge-conflict
- id: check-json - id: check-json
@@ -13,16 +16,15 @@ repos:
- --autofix - --autofix
- id: trailing-whitespace - id: trailing-whitespace
exclude: README.md exclude: README.md
- repo: https://github.com/asottile/pyupgrade - repo: git://github.com/asottile/pyupgrade
rev: v1.12.0 rev: v2.24.0
hooks: hooks:
- id: pyupgrade - id: pyupgrade
- repo: https://github.com/ambv/black - repo: git://github.com/ambv/black
rev: 19.10b0 rev: 19.3b0
hooks: hooks:
- id: black - id: black
language_version: python3 - repo: git://github.com/PyCQA/flake8
- repo: https://github.com/PyCQA/flake8 rev: 3.8.4
rev: 3.7.8
hooks: hooks:
- id: flake8 - id: flake8

View File

@@ -291,14 +291,7 @@ class Field:
class _DataclassParams: class _DataclassParams:
__slots__ = ( __slots__ = ("init", "repr", "eq", "order", "unsafe_hash", "frozen")
"init",
"repr",
"eq",
"order",
"unsafe_hash",
"frozen",
)
def __init__(self, init, repr, eq, order, unsafe_hash, frozen): def __init__(self, init, repr, eq, order, unsafe_hash, frozen):
self.init = init self.init = init
@@ -442,13 +435,11 @@ def _field_init(f, frozen, globals, self_name):
# This field does not need initialization. Signify that # This field does not need initialization. Signify that
# to the caller by returning None. # to the caller by returning None.
return None return None
# Only test this now, so that we can create variables for the # Only test this now, so that we can create variables for the
# default. However, return None to signify that we're not going # default. However, return None to signify that we're not going
# to actually do the assignment statement for InitVars. # to actually do the assignment statement for InitVars.
if f._field_type == _FIELD_INITVAR: if f._field_type == _FIELD_INITVAR:
return None return None
# Now, actually generate the field assignment. # Now, actually generate the field assignment.
return _field_assign(frozen, f.name, value, self_name) return _field_assign(frozen, f.name, value, self_name)
@@ -490,7 +481,6 @@ def _init_fn(fields, frozen, has_post_init, self_name):
raise TypeError( raise TypeError(
f"non-default argument {f.name!r} " "follows default argument" f"non-default argument {f.name!r} " "follows default argument"
) )
globals = {"MISSING": MISSING, "_HAS_DEFAULT_FACTORY": _HAS_DEFAULT_FACTORY} globals = {"MISSING": MISSING, "_HAS_DEFAULT_FACTORY": _HAS_DEFAULT_FACTORY}
body_lines = [] body_lines = []
@@ -500,16 +490,13 @@ def _init_fn(fields, frozen, has_post_init, self_name):
# initialization (it's a pseudo-field). Just skip it. # initialization (it's a pseudo-field). Just skip it.
if line: if line:
body_lines.append(line) body_lines.append(line)
# Does this class have a post-init function? # Does this class have a post-init function?
if has_post_init: if has_post_init:
params_str = ",".join(f.name for f in fields if f._field_type is _FIELD_INITVAR) params_str = ",".join(f.name for f in fields if f._field_type is _FIELD_INITVAR)
body_lines.append(f"{self_name}.{_POST_INIT_NAME}({params_str})") body_lines.append(f"{self_name}.{_POST_INIT_NAME}({params_str})")
# If no body lines, use 'pass'. # If no body lines, use 'pass'.
if not body_lines: if not body_lines:
body_lines = ["pass"] body_lines = ["pass"]
locals = {f"_type_{f.name}": f.type for f in fields} locals = {f"_type_{f.name}": f.type for f in fields}
return _create_fn( return _create_fn(
"__init__", "__init__",
@@ -674,7 +661,6 @@ def _get_field(cls, a_name, a_type):
# This is a field in __slots__, so it has no default value. # This is a field in __slots__, so it has no default value.
default = MISSING default = MISSING
f = field(default=default) f = field(default=default)
# Only at this point do we know the name and the type. Set them. # Only at this point do we know the name and the type. Set them.
f.name = a_name f.name = a_name
f.type = a_type f.type = a_type
@@ -705,7 +691,6 @@ def _get_field(cls, a_name, a_type):
and _is_type(f.type, cls, typing, typing.ClassVar, _is_classvar) and _is_type(f.type, cls, typing, typing.ClassVar, _is_classvar)
): ):
f._field_type = _FIELD_CLASSVAR f._field_type = _FIELD_CLASSVAR
# If the type is InitVar, or if it's a matching string annotation, # If the type is InitVar, or if it's a matching string annotation,
# then it's an InitVar. # then it's an InitVar.
if f._field_type is _FIELD: if f._field_type is _FIELD:
@@ -717,7 +702,6 @@ def _get_field(cls, a_name, a_type):
and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, _is_initvar) and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, _is_initvar)
): ):
f._field_type = _FIELD_INITVAR f._field_type = _FIELD_INITVAR
# Validations for individual fields. This is delayed until now, # Validations for individual fields. This is delayed until now,
# instead of in the Field() constructor, since only here do we # instead of in the Field() constructor, since only here do we
# know the field name, which allows for better error reporting. # know the field name, which allows for better error reporting.
@@ -731,14 +715,12 @@ def _get_field(cls, a_name, a_type):
# example, how about init=False (or really, # example, how about init=False (or really,
# init=<not-the-default-init-value>)? It makes no sense for # init=<not-the-default-init-value>)? It makes no sense for
# ClassVar and InitVar to specify init=<anything>. # ClassVar and InitVar to specify init=<anything>.
# For real fields, disallow mutable defaults for known types. # For real fields, disallow mutable defaults for known types.
if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)): if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)):
raise ValueError( raise ValueError(
f"mutable default {type(f.default)} for field " f"mutable default {type(f.default)} for field "
f"{f.name} is not allowed: use default_factory" f"{f.name} is not allowed: use default_factory"
) )
return f return f
@@ -827,7 +809,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
fields[f.name] = f fields[f.name] = f
if getattr(b, _PARAMS).frozen: if getattr(b, _PARAMS).frozen:
any_frozen_base = True any_frozen_base = True
# Annotations that are defined in this class (not in base # Annotations that are defined in this class (not in base
# classes). If __annotations__ isn't present, then this class # classes). If __annotations__ isn't present, then this class
# adds no new annotations. We use this to compute fields that are # adds no new annotations. We use this to compute fields that are
@@ -866,22 +847,18 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
delattr(cls, f.name) delattr(cls, f.name)
else: else:
setattr(cls, f.name, f.default) setattr(cls, f.name, f.default)
# Do we have any Field members that don't also have annotations? # Do we have any Field members that don't also have annotations?
for name, value in cls.__dict__.items(): for name, value in cls.__dict__.items():
if isinstance(value, Field) and not name in cls_annotations: if isinstance(value, Field) and not name in cls_annotations:
raise TypeError(f"{name!r} is a field but has no type annotation") raise TypeError(f"{name!r} is a field but has no type annotation")
# Check rules that apply if we are derived from any dataclasses. # Check rules that apply if we are derived from any dataclasses.
if has_dataclass_bases: if has_dataclass_bases:
# Raise an exception if any of our bases are frozen, but we're not. # Raise an exception if any of our bases are frozen, but we're not.
if any_frozen_base and not frozen: if any_frozen_base and not frozen:
raise TypeError("cannot inherit non-frozen dataclass from a " "frozen one") raise TypeError("cannot inherit non-frozen dataclass from a " "frozen one")
# Raise an exception if we're frozen, but none of our bases are. # Raise an exception if we're frozen, but none of our bases are.
if not any_frozen_base and frozen: if not any_frozen_base and frozen:
raise TypeError("cannot inherit frozen dataclass from a " "non-frozen one") raise TypeError("cannot inherit frozen dataclass from a " "non-frozen one")
# Remember all of the fields on our class (including bases). This # Remember all of the fields on our class (including bases). This
# also marks this class as being a dataclass. # also marks this class as being a dataclass.
setattr(cls, _FIELDS, fields) setattr(cls, _FIELDS, fields)
@@ -900,7 +877,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
# eq methods. # eq methods.
if order and not eq: if order and not eq:
raise ValueError("eq must be true if order is true") raise ValueError("eq must be true if order is true")
if init: if init:
# Does this class have a post-init function? # Does this class have a post-init function?
has_post_init = hasattr(cls, _POST_INIT_NAME) has_post_init = hasattr(cls, _POST_INIT_NAME)
@@ -920,7 +896,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
"__dataclass_self__" if "self" in fields else "self", "__dataclass_self__" if "self" in fields else "self",
), ),
) )
# Get the fields as a list, and include only real fields. This is # Get the fields as a list, and include only real fields. This is
# used in all of the following methods. # used in all of the following methods.
field_list = [f for f in fields.values() if f._field_type is _FIELD] field_list = [f for f in fields.values() if f._field_type is _FIELD]
@@ -928,7 +903,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
if repr: if repr:
flds = [f for f in field_list if f.repr] flds = [f for f in field_list if f.repr]
_set_new_attribute(cls, "__repr__", _repr_fn(flds)) _set_new_attribute(cls, "__repr__", _repr_fn(flds))
if eq: if eq:
# Create _eq__ method. There's no need for a __ne__ method, # Create _eq__ method. There's no need for a __ne__ method,
# since python will call __eq__ and negate it. # since python will call __eq__ and negate it.
@@ -938,7 +912,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
_set_new_attribute( _set_new_attribute(
cls, "__eq__", _cmp_fn("__eq__", "==", self_tuple, other_tuple) cls, "__eq__", _cmp_fn("__eq__", "==", self_tuple, other_tuple)
) )
if order: if order:
# Create and set the ordering methods. # Create and set the ordering methods.
flds = [f for f in field_list if f.compare] flds = [f for f in field_list if f.compare]
@@ -958,7 +931,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
f"in class {cls.__name__}. Consider using " f"in class {cls.__name__}. Consider using "
"functools.total_ordering" "functools.total_ordering"
) )
if frozen: if frozen:
for fn in _frozen_get_del_attr(cls, field_list): for fn in _frozen_get_del_attr(cls, field_list):
if _set_new_attribute(cls, fn.__name__, fn): if _set_new_attribute(cls, fn.__name__, fn):
@@ -966,7 +938,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
f"Cannot overwrite attribute {fn.__name__} " f"Cannot overwrite attribute {fn.__name__} "
f"in class {cls.__name__}" f"in class {cls.__name__}"
) )
# Decide if/how we're going to create a hash function. # Decide if/how we're going to create a hash function.
hash_action = _hash_action[ hash_action = _hash_action[
bool(unsafe_hash), bool(eq), bool(frozen), has_explicit_hash bool(unsafe_hash), bool(eq), bool(frozen), has_explicit_hash
@@ -975,11 +946,9 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
# No need to call _set_new_attribute here, since by the time # No need to call _set_new_attribute here, since by the time
# we're here the overwriting is unconditional. # we're here the overwriting is unconditional.
cls.__hash__ = hash_action(cls, field_list) cls.__hash__ = hash_action(cls, field_list)
if not getattr(cls, "__doc__"): if not getattr(cls, "__doc__"):
# Create a class doc-string. # Create a class doc-string.
cls.__doc__ = cls.__name__ + str(inspect.signature(cls)).replace(" -> None", "") cls.__doc__ = cls.__name__ + str(inspect.signature(cls)).replace(" -> None", "")
return cls return cls
@@ -1015,7 +984,6 @@ def dataclass(
if _cls is None: if _cls is None:
# We're called with parens. # We're called with parens.
return wrap return wrap
# We're called as @dataclass without parens. # We're called as @dataclass without parens.
return wrap(_cls) return wrap(_cls)
@@ -1032,7 +1000,6 @@ def fields(class_or_instance):
fields = getattr(class_or_instance, _FIELDS) fields = getattr(class_or_instance, _FIELDS)
except AttributeError: except AttributeError:
raise TypeError("must be called with a dataclass type or instance") raise TypeError("must be called with a dataclass type or instance")
# Exclude pseudo-fields. Note that fields is sorted by insertion # Exclude pseudo-fields. Note that fields is sorted by insertion
# order, so the order of the tuple is as the fields were defined. # order, so the order of the tuple is as the fields were defined.
return tuple(f for f in fields.values() if f._field_type is _FIELD) return tuple(f for f in fields.values() if f._field_type is _FIELD)
@@ -1174,7 +1141,6 @@ def make_dataclass(
else: else:
# Copy namespace since we're going to mutate it. # Copy namespace since we're going to mutate it.
namespace = namespace.copy() namespace = namespace.copy()
# While we're looking through the field names, validate that they # While we're looking through the field names, validate that they
# are identifiers, are not keywords, and not duplicates. # are identifiers, are not keywords, and not duplicates.
seen = set() seen = set()
@@ -1184,23 +1150,20 @@ def make_dataclass(
name = item name = item
tp = "typing.Any" tp = "typing.Any"
elif len(item) == 2: elif len(item) == 2:
name, tp, = item (name, tp) = item
elif len(item) == 3: elif len(item) == 3:
name, tp, spec = item name, tp, spec = item
namespace[name] = spec namespace[name] = spec
else: else:
raise TypeError(f"Invalid field: {item!r}") raise TypeError(f"Invalid field: {item!r}")
if not isinstance(name, str) or not name.isidentifier(): if not isinstance(name, str) or not name.isidentifier():
raise TypeError(f"Field names must be valid identifers: {name!r}") raise TypeError(f"Field names must be valid identifers: {name!r}")
if keyword.iskeyword(name): if keyword.iskeyword(name):
raise TypeError(f"Field names must not be keywords: {name!r}") raise TypeError(f"Field names must not be keywords: {name!r}")
if name in seen: if name in seen:
raise TypeError(f"Field name duplicated: {name!r}") raise TypeError(f"Field name duplicated: {name!r}")
seen.add(name) seen.add(name)
anns[name] = tp anns[name] = tp
namespace["__annotations__"] = anns namespace["__annotations__"] = anns
# We use `types.new_class()` instead of simply `type()` to allow dynamic creation # We use `types.new_class()` instead of simply `type()` to allow dynamic creation
# of generic dataclassses. # of generic dataclassses.
@@ -1236,7 +1199,6 @@ def replace(obj, **changes):
if not _is_dataclass_instance(obj): if not _is_dataclass_instance(obj):
raise TypeError("replace() should be called on dataclass instances") raise TypeError("replace() should be called on dataclass instances")
# It's an error to have init=False fields in 'changes'. # It's an error to have init=False fields in 'changes'.
# If a field is not in 'changes', read its value from the provided obj. # If a field is not in 'changes', read its value from the provided obj.
@@ -1250,10 +1212,8 @@ def replace(obj, **changes):
"replace()" "replace()"
) )
continue continue
if f.name not in changes: if f.name not in changes:
changes[f.name] = getattr(obj, f.name) changes[f.name] = getattr(obj, f.name)
# Create the new object, which calls __init__() and # Create the new object, which calls __init__() and
# __post_init__() (if defined), using all of the init fields we've # __post_init__() (if defined), using all of the init fields we've
# added and/or left in 'changes'. If there are values supplied in # added and/or left in 'changes'. If there are values supplied in

View File

@@ -92,12 +92,10 @@ class Node(AbstractNode):
_type, _id = cls.from_global_id(global_id) _type, _id = cls.from_global_id(global_id)
except Exception as e: except Exception as e:
raise Exception( raise Exception(
(
f'Unable to parse global ID "{global_id}". ' f'Unable to parse global ID "{global_id}". '
'Make sure it is a base64 encoded string in the format: "TypeName:id". ' 'Make sure it is a base64 encoded string in the format: "TypeName:id". '
f"Exception message: {str(e)}" f"Exception message: {str(e)}"
) )
)
graphene_type = info.schema.get_type(_type) graphene_type = info.schema.get_type(_type)
if graphene_type is None: if graphene_type is None:

View File

@@ -51,10 +51,10 @@ letters = {letter: Letter(id=i, letter=letter) for i, letter in enumerate(letter
def edges(selected_letters): def edges(selected_letters):
return [ return [
{ {
"node": {"id": base64("Letter:%s" % l.id), "letter": l.letter}, "node": {"id": base64("Letter:%s" % letter.id), "letter": letter.letter},
"cursor": base64("arrayconnection:%s" % l.id), "cursor": base64("arrayconnection:%s" % letter.id),
} }
for l in [letters[i] for i in selected_letters] for letter in [letters[i] for i in selected_letters]
] ]

View File

@@ -51,10 +51,10 @@ letters = {letter: Letter(id=i, letter=letter) for i, letter in enumerate(letter
def edges(selected_letters): def edges(selected_letters):
return [ return [
{ {
"node": {"id": base64("Letter:%s" % l.id), "letter": l.letter}, "node": {"id": base64("Letter:%s" % letter.id), "letter": letter.letter},
"cursor": base64("arrayconnection:%s" % l.id), "cursor": base64("arrayconnection:%s" % letter.id),
} }
for l in [letters[i] for i in selected_letters] for letter in [letters[i] for i in selected_letters]
] ]
@@ -66,7 +66,6 @@ def cursor_for(ltr):
async def execute(args=""): async def execute(args=""):
if args: if args:
args = "(" + args + ")" args = "(" + args + ")"
return await schema.execute_async( return await schema.execute_async(
""" """
{ {
@@ -164,14 +163,14 @@ async def test_respects_first_and_after_and_before_too_few():
@mark.asyncio @mark.asyncio
async def test_respects_first_and_after_and_before_too_many(): async def test_respects_first_and_after_and_before_too_many():
await check( await check(
f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
) )
@mark.asyncio @mark.asyncio
async def test_respects_first_and_after_and_before_exactly_right(): async def test_respects_first_and_after_and_before_exactly_right():
await check( await check(
f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
) )
@@ -187,14 +186,14 @@ async def test_respects_last_and_after_and_before_too_few():
@mark.asyncio @mark.asyncio
async def test_respects_last_and_after_and_before_too_many(): async def test_respects_last_and_after_and_before_too_many():
await check( await check(
f'last: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", f'last: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
) )
@mark.asyncio @mark.asyncio
async def test_respects_last_and_after_and_before_exactly_right(): async def test_respects_last_and_after_and_before_exactly_right():
await check( await check(
f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD", f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
) )

View File

@@ -76,7 +76,6 @@ class Mutation(ObjectType):
): ):
if not _meta: if not _meta:
_meta = MutationOptions(cls) _meta = MutationOptions(cls)
output = output or getattr(cls, "Output", None) output = output or getattr(cls, "Output", None)
fields = {} fields = {}
@@ -85,43 +84,35 @@ class Mutation(ObjectType):
interface, Interface interface, Interface
), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".' ), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".'
fields.update(interface._meta.fields) fields.update(interface._meta.fields)
if not output: if not output:
# If output is defined, we don't need to get the fields # If output is defined, we don't need to get the fields
fields = {} fields = {}
for base in reversed(cls.__mro__): for base in reversed(cls.__mro__):
fields.update(yank_fields_from_attrs(base.__dict__, _as=Field)) fields.update(yank_fields_from_attrs(base.__dict__, _as=Field))
output = cls output = cls
if not arguments: if not arguments:
input_class = getattr(cls, "Arguments", None) input_class = getattr(cls, "Arguments", None)
if not input_class: if not input_class:
input_class = getattr(cls, "Input", None) input_class = getattr(cls, "Input", None)
if input_class: if input_class:
warn_deprecation( warn_deprecation(
(
f"Please use {cls.__name__}.Arguments instead of {cls.__name__}.Input." f"Please use {cls.__name__}.Arguments instead of {cls.__name__}.Input."
" Input is now only used in ClientMutationID.\n" " Input is now only used in ClientMutationID.\n"
"Read more:" "Read more:"
" https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input" " https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input"
) )
)
if input_class: if input_class:
arguments = props(input_class) arguments = props(input_class)
else: else:
arguments = {} arguments = {}
if not resolver: if not resolver:
mutate = getattr(cls, "mutate", None) mutate = getattr(cls, "mutate", None)
assert mutate, "All mutations must define a mutate method in it" assert mutate, "All mutations must define a mutate method in it"
resolver = get_unbound_function(mutate) resolver = get_unbound_function(mutate)
if _meta.fields: if _meta.fields:
_meta.fields.update(fields) _meta.fields.update(fields)
else: else:
_meta.fields = fields _meta.fields = fields
_meta.interfaces = interfaces _meta.interfaces = interfaces
_meta.output = output _meta.output = output
_meta.resolver = resolver _meta.resolver = resolver
@@ -133,7 +124,7 @@ class Mutation(ObjectType):
def Field( def Field(
cls, name=None, description=None, deprecation_reason=None, required=False cls, name=None, description=None, deprecation_reason=None, required=False
): ):
""" Mount instance of mutation Field. """ """Mount instance of mutation Field."""
return Field( return Field(
cls._meta.output, cls._meta.output,
args=cls._meta.arguments, args=cls._meta.arguments,

View File

@@ -7,7 +7,6 @@ try:
from dataclasses import make_dataclass, field from dataclasses import make_dataclass, field
except ImportError: except ImportError:
from ..pyutils.dataclasses import make_dataclass, field # type: ignore from ..pyutils.dataclasses import make_dataclass, field # type: ignore
# For static type checking with Mypy # For static type checking with Mypy
MYPY = False MYPY = False
if MYPY: if MYPY:
@@ -28,7 +27,7 @@ class ObjectTypeMeta(BaseTypeMeta):
pass pass
base_cls = super().__new__( base_cls = super().__new__(
cls, name_, (InterObjectType,) + bases, namespace, **options, cls, name_, (InterObjectType,) + bases, namespace, **options
) )
if base_cls._meta: if base_cls._meta:
fields = [ fields = [
@@ -133,7 +132,6 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta):
): ):
if not _meta: if not _meta:
_meta = ObjectTypeOptions(cls) _meta = ObjectTypeOptions(cls)
fields = {} fields = {}
for interface in interfaces: for interface in interfaces:
@@ -141,10 +139,8 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta):
interface, Interface interface, Interface
), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".' ), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".'
fields.update(interface._meta.fields) fields.update(interface._meta.fields)
for base in reversed(cls.__mro__): for base in reversed(cls.__mro__):
fields.update(yank_fields_from_attrs(base.__dict__, _as=Field)) fields.update(yank_fields_from_attrs(base.__dict__, _as=Field))
assert not (possible_types and cls.is_type_of), ( assert not (possible_types and cls.is_type_of), (
f"{cls.__name__}.Meta.possible_types will cause type collision with {cls.__name__}.is_type_of. " f"{cls.__name__}.Meta.possible_types will cause type collision with {cls.__name__}.is_type_of. "
"Please use one or other." "Please use one or other."
@@ -154,7 +150,6 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta):
_meta.fields.update(fields) _meta.fields.update(fields)
else: else:
_meta.fields = fields _meta.fields = fields
if not _meta.interfaces: if not _meta.interfaces:
_meta.interfaces = interfaces _meta.interfaces = interfaces
_meta.possible_types = possible_types _meta.possible_types = possible_types

View File

@@ -27,8 +27,6 @@ from graphql import (
GraphQLSchema, GraphQLSchema,
GraphQLString, GraphQLString,
) )
from graphql.execution import ExecutionContext
from graphql.execution.values import get_argument_values
from ..utils.str_converters import to_camel_case from ..utils.str_converters import to_camel_case
from ..utils.get_unbound_function import get_unbound_function from ..utils.get_unbound_function import get_unbound_function

View File

@@ -64,15 +64,11 @@ def test_base64_query_none():
def test_base64_query_invalid(): def test_base64_query_invalid():
bad_inputs = [ bad_inputs = [dict(), 123, "This is not valid base64"]
dict(),
123,
"This is not valid base64",
]
for input_ in bad_inputs: for input_ in bad_inputs:
result = schema.execute( result = schema.execute(
"""{ base64(input: $input) }""", variables={"input": input_}, """{ base64(input: $input) }""", variables={"input": input_}
) )
assert isinstance(result.errors, list) assert isinstance(result.errors, list)
assert len(result.errors) == 1 assert len(result.errors) == 1

View File

@@ -26,8 +26,8 @@ def test_enum_construction():
assert RGB._meta.description == "Description" assert RGB._meta.description == "Description"
values = RGB._meta.enum.__members__.values() values = RGB._meta.enum.__members__.values()
assert sorted([v.name for v in values]) == ["BLUE", "GREEN", "RED"] assert sorted(v.name for v in values) == ["BLUE", "GREEN", "RED"]
assert sorted([v.description for v in values]) == [ assert sorted(v.description for v in values) == [
"Description BLUE", "Description BLUE",
"Description GREEN", "Description GREEN",
"Description RED", "Description RED",
@@ -52,7 +52,7 @@ def test_enum_instance_construction():
RGB = Enum("RGB", "RED,GREEN,BLUE") RGB = Enum("RGB", "RED,GREEN,BLUE")
values = RGB._meta.enum.__members__.values() values = RGB._meta.enum.__members__.values()
assert sorted([v.name for v in values]) == ["BLUE", "GREEN", "RED"] assert sorted(v.name for v in values) == ["BLUE", "GREEN", "RED"]
def test_enum_from_builtin_enum(): def test_enum_from_builtin_enum():
@@ -465,7 +465,7 @@ def test_mutation_enum_input_type():
color color
} }
} }
""", """
) )
assert not result.errors assert not result.errors
assert result.data == {"createPaint": {"color": "RED"}} assert result.data == {"createPaint": {"color": "RED"}}

View File

@@ -1,6 +1,5 @@
from graphql.type import GraphQLObjectType, GraphQLSchema from graphql.type import GraphQLObjectType, GraphQLSchema
from graphql import GraphQLError from pytest import raises
from pytest import mark, raises, fixture
from graphene.tests.utils import dedent from graphene.tests.utils import dedent

View File

@@ -36,4 +36,4 @@ class OrderedType:
return NotImplemented return NotImplemented
def __hash__(self): def __hash__(self):
return hash((self.creation_counter)) return hash(self.creation_counter)

View File

@@ -2,7 +2,4 @@ from .depth_limit import depth_limit_validator
from .disable_introspection import DisableIntrospection from .disable_introspection import DisableIntrospection
__all__ = [ __all__ = ["DisableIntrospection", "depth_limit_validator"]
"DisableIntrospection",
"depth_limit_validator"
]

View File

@@ -30,7 +30,6 @@ try:
except ImportError: except ImportError:
# backwards compatibility for v3.6 # backwards compatibility for v3.6
from typing import Pattern from typing import Pattern
from typing import Callable, Dict, List, Optional, Union from typing import Callable, Dict, List, Optional, Union
from graphql import GraphQLError from graphql import GraphQLError
@@ -75,7 +74,6 @@ def depth_limit_validator(
operation_name=name, operation_name=name,
ignore=ignore, ignore=ignore,
) )
if callable(callback): if callable(callback):
callback(query_depths) callback(query_depths)
super().__init__(validation_context) super().__init__(validation_context)
@@ -90,7 +88,6 @@ def get_fragments(
for definition in definitions: for definition in definitions:
if isinstance(definition, FragmentDefinitionNode): if isinstance(definition, FragmentDefinitionNode):
fragments[definition.name.value] = definition fragments[definition.name.value] = definition
return fragments return fragments
@@ -105,7 +102,6 @@ def get_queries_and_mutations(
if isinstance(definition, OperationDefinitionNode): if isinstance(definition, OperationDefinitionNode):
operation = definition.name.value if definition.name else "anonymous" operation = definition.name.value if definition.name else "anonymous"
operations[operation] = definition operations[operation] = definition
return operations return operations
@ -126,7 +122,6 @@ def determine_depth(
) )
) )
return depth_so_far return depth_so_far
if isinstance(node, FieldNode): if isinstance(node, FieldNode):
should_ignore = is_introspection_key(node.name.value) or is_ignored( should_ignore = is_introspection_key(node.name.value) or is_ignored(
node, ignore node, ignore
@ -134,7 +129,6 @@ def determine_depth(
if should_ignore or not node.selection_set: if should_ignore or not node.selection_set:
return 0 return 0
return 1 + max( return 1 + max(
map( map(
lambda selection: determine_depth( lambda selection: determine_depth(
@ -177,13 +171,14 @@ def determine_depth(
) )
) )
else: else:
raise Exception(f"Depth crawler cannot handle: {node.kind}.") # pragma: no cover raise Exception(
f"Depth crawler cannot handle: {node.kind}."
) # pragma: no cover
def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bool: def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bool:
if ignore is None: if ignore is None:
return False return False
for rule in ignore: for rule in ignore:
field_name = node.name.value field_name = node.name.value
if isinstance(rule, str): if isinstance(rule, str):
@ -197,5 +192,4 @@ def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bo
return True return True
else: else:
raise ValueError(f"Invalid ignore option: {rule}.") raise ValueError(f"Invalid ignore option: {rule}.")
return False return False

View File

@ -11,7 +11,6 @@ class DisableIntrospection(ValidationRule):
if is_introspection_key(field_name): if is_introspection_key(field_name):
self.report_error( self.report_error(
GraphQLError( GraphQLError(
f"Cannot query '{field_name}': introspection is disabled.", f"Cannot query '{field_name}': introspection is disabled.", node
node,
) )
) )

View File

@ -48,26 +48,11 @@ class HumanType(ObjectType):
class Query(ObjectType): class Query(ObjectType):
user = Field( user = Field(HumanType, required=True, name=String())
HumanType, version = String(required=True)
required=True, user1 = Field(HumanType, required=True)
name=String() user2 = Field(HumanType, required=True)
) user3 = Field(HumanType, required=True)
version = String(
required=True
)
user1 = Field(
HumanType,
required=True
)
user2 = Field(
HumanType,
required=True
)
user3 = Field(
HumanType,
required=True
)
@staticmethod @staticmethod
def resolve_user(root, info, name=None): def resolve_user(root, info, name=None):
@ -91,9 +76,7 @@ def run_query(query: str, max_depth: int, ignore=None):
document_ast=document, document_ast=document,
rules=( rules=(
depth_limit_validator( depth_limit_validator(
max_depth=max_depth, max_depth=max_depth, ignore=ignore, callback=callback
ignore=ignore,
callback=callback
), ),
), ),
) )
@ -253,11 +236,7 @@ def test_should_ignore_field():
errors, result = run_query( errors, result = run_query(
query, query,
10, 10,
ignore=[ ignore=["user1", re.compile("user2"), lambda field_name: field_name == "user3"],
"user1",
re.compile("user2"),
lambda field_name: field_name == "user3",
],
) )
expected = {"read1": 2, "read2": 0} expected = {"read1": 2, "read2": 0}
@ -272,8 +251,4 @@ def test_should_raise_invalid_ignore():
} }
""" """
with raises(ValueError, match="Invalid ignore option:"): with raises(ValueError, match="Invalid ignore option:"):
run_query( run_query(query, 10, ignore=[True])
query,
10,
ignore=[True],
)

View File

@ -5,9 +5,7 @@ from ..disable_introspection import DisableIntrospection
class Query(ObjectType): class Query(ObjectType):
name = String( name = String(required=True)
required=True
)
@staticmethod @staticmethod
def resolve_name(root, info): def resolve_name(root, info):
@ -23,9 +21,7 @@ def run_query(query: str):
errors = validate( errors = validate(
schema=schema.graphql_schema, schema=schema.graphql_schema,
document_ast=document, document_ast=document,
rules=( rules=(DisableIntrospection,),
DisableIntrospection,
),
) )
return errors return errors

10
tox.ini
View File

@ -11,25 +11,25 @@ commands =
py{36,37,38}: pytest --cov=graphene graphene examples {posargs} py{36,37,38}: pytest --cov=graphene graphene examples {posargs}
[testenv:pre-commit] [testenv:pre-commit]
basepython=python3.7 basepython=python3.8
deps = deps =
pre-commit>=2,<3 pre-commit>=2,<3
setenv = setenv =
LC_CTYPE=en_US.UTF-8 LC_CTYPE=en_US.UTF-8
commands = commands =
pre-commit {posargs:run --all-files} pre-commit run --all-files --show-diff-on-failure
[testenv:mypy] [testenv:mypy]
basepython=python3.7 basepython=python3.8
deps = deps =
mypy>=0.761,<1 mypy>=0.761,<1
commands = commands =
mypy graphene mypy graphene
[testenv:flake8] [testenv:flake8]
basepython=python3.7 basepython=python3.8
deps = deps =
flake8>=3.7,<4 flake8>=3.8,<4
commands = commands =
pip install --pre -e . pip install --pre -e .
flake8 graphene flake8 graphene