Merge branch 'master' into master

Commit b62f246300 by Erik Wrede, 2022-07-27 19:56:25 +02:00, committed via GitHub.
74 changed files with 1199 additions and 622 deletions

.github/workflows/coveralls.yml (new file, 25 lines)

@ -0,0 +1,25 @@
name: 📊 Check Coverage
on:
  push:
    branches:
      - master
      - '*.x'
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.rst'
  pull_request:
    branches:
      - master
      - '*.x'
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.rst'
jobs:
  coveralls_finish:
    # check coverage increase/decrease
    runs-on: ubuntu-latest
    steps:
      - name: Coveralls Finished
        uses: AndreMiras/coveralls-python-action@develop

.github/workflows/deploy.yml (new file, 26 lines)

@ -0,0 +1,26 @@
name: 🚀 Deploy to PyPI
on:
  push:
    tags:
      - 'v*'
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9
      - name: Build wheel and source tarball
        run: |
          pip install wheel
          python setup.py sdist bdist_wheel
      - name: Publish a Python distribution to PyPI
        uses: pypa/gh-action-pypi-publish@v1.1.0
        with:
          user: __token__
          password: ${{ secrets.pypi_password }}

.github/workflows/lint.yml (new file, 26 lines)

@ -0,0 +1,26 @@
name: 💅 Lint
on: [push, pull_request]
jobs:
  build:
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
          python-version: 3.9
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install tox
      - name: Run lint
        run: tox
        env:
          TOXENV: pre-commit
      - name: Run mypy
        run: tox
        env:
          TOXENV: mypy

.github/workflows/tests.yml (new file, 66 lines)

@ -0,0 +1,66 @@
name: 📄 Tests
on:
  push:
    branches:
      - master
      - '*.x'
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.rst'
  pull_request:
    branches:
      - master
      - '*.x'
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.rst'
jobs:
  tests:
    # runs the test suite
    name: ${{ matrix.name }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310}
          - {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39}
          - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38}
          - {name: '3.7', python: '3.7', os: ubuntu-latest, tox: py37}
          - {name: '3.6', python: '3.6', os: ubuntu-latest, tox: py36}
    steps:
      - uses: actions/checkout@v3
      - uses: actions/setup-python@v3
        with:
          python-version: ${{ matrix.python }}
      - name: update pip
        run: |
          pip install -U wheel
          pip install -U setuptools
          python -m pip install -U pip
      - name: get pip cache dir
        id: pip-cache
        run: echo "::set-output name=dir::$(pip cache dir)"
      - name: cache pip dependencies
        uses: actions/cache@v3
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: pip|${{ runner.os }}|${{ matrix.python }}|${{ hashFiles('setup.py') }}
      - run: pip install tox
      - run: tox -e ${{ matrix.tox }}
      - name: Upload coverage.xml
        if: ${{ matrix.python == '3.10' }}
        uses: actions/upload-artifact@v3
        with:
          name: graphene-sqlalchemy-coverage
          path: coverage.xml
          if-no-files-found: error
      - name: Upload coverage.xml to codecov
        if: ${{ matrix.python == '3.10' }}
        uses: codecov/codecov-action@v3

.pre-commit-config.yaml

@ -1,6 +1,9 @@
default_language_version:
python: python3.9
repos:
- repo: git://github.com/pre-commit/pre-commit-hooks
rev: v2.1.0
- repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.2.0
hooks:
- id: check-merge-conflict
- id: check-json
@ -14,15 +17,14 @@ repos:
- id: trailing-whitespace
exclude: README.md
- repo: https://github.com/asottile/pyupgrade
rev: v1.12.0
rev: v2.32.1
hooks:
- id: pyupgrade
- repo: https://github.com/ambv/black
rev: 19.10b0
rev: 22.3.0
hooks:
- id: black
language_version: python3
- id: black
- repo: https://github.com/PyCQA/flake8
rev: 3.7.8
rev: 4.0.1
hooks:
- id: flake8

.travis.yml (deleted)

@ -1,42 +0,0 @@
language: python
dist: xenial
python:
  - "3.6"
  - "3.7"
  - "3.8"
install:
  - pip install tox tox-travis
script: tox
after_success:
  - pip install coveralls
  - coveralls
cache:
  directories:
    - $HOME/.cache/pip
    - $HOME/.cache/pre-commit
stages:
  - test
  - name: deploy
    if: tag IS present
jobs:
  fast_finish: true
  include:
    - env: TOXENV=pre-commit
      python: 3.7
    - env: TOXENV=mypy
      python: 3.7
    - stage: deploy
      python: 3.7
      after_success: true
      deploy:
        provider: pypi
        user: syrusakbary
        on:
          tags: true
        password:
          secure: LHOp9DvYR+70vj4YVY8+JRNCKUOfYZREEUY3+4lMUpY7Zy5QwDfgEMXG64ybREH9dFldpUqVXRj53eeU3spfudSfh8NHkgqW7qihez2AhSnRc4dK6ooNfB+kLcSoJ4nUFGxdYImABc4V1hJvflGaUkTwDNYVxJF938bPaO797IvSbuI86llwqkvuK2Vegv9q/fy9sVGaF9VZIs4JgXwR5AyDR7FBArl+S84vWww4vTFD33hoE88VR4QvFY3/71BwRtQrnCMm7AOm31P9u29yi3bpzQpiOR2rHsgrsYdm597QzFKVxYwsmf9uAx2bpbSPy2WibunLePIvOFwm8xcfwnz4/J4ONBc5PSFmUytTWpzEnxb0bfUNLuYloIS24V6OZ8BfAhiYZ1AwySeJCQDM4Vk1V8IF6trTtyx5EW/uV9jsHCZ3LFsAD7UnFRTosIgN3SAK3ZWCEk5oF2IvjecsolEfkRXB3q9EjMkkuXRUeFDH2lWJLgNE27BzY6myvZVzPmfwZUsPBlPD/6w+WLSp97Rjgr9zS3T1d4ddqFM4ZYu04f2i7a/UUQqG+itzzuX5DWLPvzuNt37JB45mB9IsvxPyXZ6SkAcLl48NGyKok1f3vQnvphkfkl4lni29woKhaau8xlsuEDrcwOoeAsVcZXiItg+l+z2SlIwM0A06EvQ=
        distributions: "sdist bdist_wheel"

README.md

@ -4,12 +4,6 @@
**We are looking for contributors**! Please check the [ROADMAP](https://github.com/graphql-python/graphene/blob/master/ROADMAP.md) to see how you can help ❤️
---
**The below readme is the documentation for the `dev` (prerelease) version of Graphene. To view the documentation for the latest stable Graphene version go to the [v2 docs](https://docs.graphene-python.org/en/stable/)**
---
## Introduction
[Graphene](http://graphene-python.org) is an opinionated Python library for building GraphQL schemas/types fast and easily.
@ -37,7 +31,7 @@ Also, Graphene is fully compatible with the GraphQL spec, working seamlessly wit
For installing graphene, just run this command in your shell
```bash
pip install "graphene>=2.0"
pip install "graphene>=3.0"
```
## Examples

README.rst

@ -1,18 +1,18 @@
|Graphene Logo| `Graphene <http://graphene-python.org>`__ |Build Status| |PyPI version| |Coverage Status|
=========================================================================================================
`💬 Join the community on
Slack <https://join.slack.com/t/graphenetools/shared_invite/enQtOTE2MDQ1NTg4MDM1LTA4Nzk0MGU0NGEwNzUxZGNjNDQ4ZjAwNDJjMjY0OGE1ZDgxZTg4YjM2ZTc4MjE2ZTAzZjE2ZThhZTQzZTkyMmM>`__
**We are looking for contributors**! Please check the
`ROADMAP <https://github.com/graphql-python/graphene/blob/master/ROADMAP.md>`__
to see how you can help ❤️
--------------
|Graphene Logo| `Graphene <http://graphene-python.org>`__ |Build Status| |PyPI version| |Coverage Status|
=========================================================================================================
Introduction
------------
`Graphene <http://graphene-python.org>`__ is a Python library for
building GraphQL schemas/types fast and easily.
`Graphene <http://graphene-python.org>`__ is an opinionated Python
library for building GraphQL schemas/types fast and easily.
- **Easy to use:** Graphene helps you use GraphQL in Python without
effort.
@ -27,17 +27,18 @@ Integrations
Graphene has multiple integrations with different frameworks:
+---------------------+----------------------------------------------------------------------------------------------+
| integration | Package |
+=====================+==============================================================================================+
| Django | `graphene-django <https://github.com/graphql-python/graphene-django/>`__ |
+---------------------+----------------------------------------------------------------------------------------------+
| SQLAlchemy | `graphene-sqlalchemy <https://github.com/graphql-python/graphene-sqlalchemy/>`__ |
+---------------------+----------------------------------------------------------------------------------------------+
| Google App Engine | `graphene-gae <https://github.com/graphql-python/graphene-gae/>`__ |
+---------------------+----------------------------------------------------------------------------------------------+
| Peewee | *In progress* (`Tracking Issue <https://github.com/graphql-python/graphene/issues/289>`__) |
+---------------------+----------------------------------------------------------------------------------------------+
+-------------------+-------------------------------------------------+
| integration | Package |
+===================+=================================================+
| Django | `graphene-django <https:/ |
| | /github.com/graphql-python/graphene-django/>`__ |
+-------------------+-------------------------------------------------+
| SQLAlchemy | `graphene-sqlalchemy <https://git |
| | hub.com/graphql-python/graphene-sqlalchemy/>`__ |
+-------------------+-------------------------------------------------+
| Google App Engine | `graphene-gae <http |
| | s://github.com/graphql-python/graphene-gae/>`__ |
+-------------------+-------------------------------------------------+
Also, Graphene is fully compatible with the GraphQL spec, working
seamlessly with all GraphQL clients, such as
@ -52,13 +53,7 @@ For installing graphene, just run this command in your shell
.. code:: bash
pip install "graphene>=2.0"
2.0 Upgrade Guide
-----------------
Please read `UPGRADE-v2.0.md </UPGRADE-v2.0.md>`__ to learn how to
upgrade.
pip install "graphene>=3.0"
Examples
--------
@ -67,26 +62,26 @@ Here is one example for you to get started:
.. code:: python
import graphene
import graphene
class Query(graphene.ObjectType):
hello = graphene.String(description='A typical hello world')
class Query(graphene.ObjectType):
hello = graphene.String(description='A typical hello world')
def resolve_hello(self, info):
return 'World'
def resolve_hello(self, info):
return 'World'
schema = graphene.Schema(query=Query)
schema = graphene.Schema(query=Query)
Then Querying ``graphene.Schema`` is as simple as:
.. code:: python
query = '''
query SayHello {
hello
}
'''
result = schema.execute(query)
query = '''
query SayHello {
hello
}
'''
result = schema.execute(query)
If you want to learn even more, you can also check the following
`examples <examples/>`__:
@ -110,20 +105,20 @@ dependencies are installed by running:
.. code:: sh
virtualenv venv
source venv/bin/activate
pip install -e ".[test]"
virtualenv venv
source venv/bin/activate
pip install -e ".[test]"
Well-written tests and maintaining good test coverage is important to
this project. While developing, run new and existing tests with:
.. code:: sh
py.test graphene/relay/tests/test_node.py # Single file
py.test graphene/relay # All tests in directory
py.test graphene/relay/tests/test_node.py # Single file
py.test graphene/relay # All tests in directory
Add the ``-s`` flag if you have introduced breakpoints into the code for
debugging. Add the ``-v`` ("verbose") flag to get more detailed test
debugging. Add the ``-v`` (“verbose”) flag to get more detailed test
output. For even more detailed output, use ``-vv``. Check out the
`pytest documentation <https://docs.pytest.org/en/latest/>`__ for more
options and test running controls.
@ -132,7 +127,7 @@ You can also run the benchmarks with:
.. code:: sh
py.test graphene --benchmark-only
py.test graphene --benchmark-only
Graphene supports several versions of Python. To make sure that changes
do not break compatibility with any of those versions, we use ``tox`` to
@ -142,14 +137,14 @@ config file, just run:
.. code:: sh
tox
tox
If you wish to run against a specific version defined in the ``tox.ini``
file:
.. code:: sh
tox -e py36
tox -e py36
Tox can only use whatever versions of Python are installed on your
system. When you create a pull request, Travis will also be running the
@ -168,7 +163,7 @@ An HTML version of the documentation is produced by running:
.. code:: sh
make docs
make docs
.. |Graphene Logo| image:: http://graphene-python.org/favicon.png
.. |Build Status| image:: https://travis-ci.org/graphql-python/graphene.svg?branch=master

UPGRADE-v1.0.md

@ -153,7 +153,7 @@ class Query(ObjectType):
```
Also, if you wanted to create an `ObjectType` that implements `Node`, you have to do it
explicity.
explicitly.
## Django

UPGRADE-v2.0.md

@ -123,7 +123,7 @@ def resolve_my_field(root, info, my_arg):
return ...
```
**PS.: Take care with receiving args like `my_arg` as above. This doesn't work for optional (non-required) arguments as stantard `Connection`'s arguments (first, before, after, before).**
**PS.: Take care with receiving args like `my_arg` as above. This doesn't work for optional (non-required) arguments as standard `Connection`'s arguments (first, last, after, before).**
You may need something like this:
```python

docs/conf.py

@ -64,18 +64,18 @@ source_suffix = ".rst"
master_doc = "index"
# General information about the project.
project = u"Graphene"
copyright = u"Graphene 2016"
author = u"Syrus Akbary"
project = "Graphene"
copyright = "Graphene 2016"
author = "Syrus Akbary"
# The version info for the project you're documenting, acts as replacement for
# |version| and |release|, also used in various other places throughout the
# built documents.
#
# The short X.Y version.
version = u"1.0"
version = "1.0"
# The full version, including alpha/beta/rc tags.
release = u"1.0"
release = "1.0"
# The language for content autogenerated by Sphinx. Refer to documentation
# for a list of supported languages.
@ -278,7 +278,7 @@ latex_elements = {
# (source start file, target name, title,
# author, documentclass [howto, manual, or own class]).
latex_documents = [
(master_doc, "Graphene.tex", u"Graphene Documentation", u"Syrus Akbary", "manual")
(master_doc, "Graphene.tex", "Graphene Documentation", "Syrus Akbary", "manual")
]
# The name of an image file (relative to this directory) to place at the top of
@ -318,7 +318,7 @@ latex_documents = [
# One entry per manual page. List of tuples
# (source start file, name, description, authors, manual section).
man_pages = [(master_doc, "graphene", u"Graphene Documentation", [author], 1)]
man_pages = [(master_doc, "graphene", "Graphene Documentation", [author], 1)]
# If true, show URL addresses after external links.
#
@ -334,7 +334,7 @@ texinfo_documents = [
(
master_doc,
"Graphene",
u"Graphene Documentation",
"Graphene Documentation",
author,
"Graphene",
"One line description of project.",

docs/execution/dataloader.rst

@ -28,10 +28,9 @@ Create loaders by providing a batch loading function.
A batch loading function accepts a list of keys, and returns a ``Promise``
which resolves to a list of ``values``.
Then load individual values from the loader. ``DataLoader`` will coalesce all
individual loads which occur within a single frame of execution (executed once
the wrapping promise is resolved) and then call your batch function with all
requested keys.
``DataLoader`` will coalesce all individual loads which occur within a
single frame of execution (executed once the wrapping promise is resolved)
and then call your batch function with all requested keys.
.. code:: python
@ -96,7 +95,7 @@ Consider the following GraphQL request:
}
Naively, if ``me``, ``bestFriend`` and ``friends`` each need to request the backend,
If ``me``, ``bestFriend`` and ``friends`` each need to send a request to the backend,
there could be at most 13 database requests!
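A minimal sketch of the batching pattern this page describes, assuming the third-party ``aiodataloader`` package (the loader used with graphene 3); ``fetch_users_by_ids`` is a hypothetical backend call:

.. code:: python

    from aiodataloader import DataLoader

    class UserLoader(DataLoader):
        async def batch_load_fn(self, keys):
            # One backend round trip per frame of execution; the returned
            # list must align 1:1 with the requested keys.
            users = {u.id: u for u in await fetch_users_by_ids(keys)}  # hypothetical
            return [users.get(key) for key in keys]

    user_loader = UserLoader()
    # Both loads below would be coalesced into a single batch_load_fn call:
    # user_a, user_b = await gather(user_loader.load(1), user_loader.load(2))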

docs/execution/execute.rst

@ -110,7 +110,7 @@ If there are multiple operations defined in a query string, ``operation_name`` s
from graphene import ObjectType, Field, Schema
class Query(ObjectType):
me = Field(User)
user = Field(User)
def resolve_user(root, info):
return get_user_by_id(12)
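A short end-to-end sketch of selecting one of several named operations (the ``User`` type and resolver body here are stand-ins, not code from this page):

.. code:: python

    from graphene import ObjectType, Field, Schema, String

    class User(ObjectType):
        name = String()

    class Query(ObjectType):
        user = Field(User)

        def resolve_user(root, info):
            return User(name="Ada")

    schema = Schema(query=Query)

    # Two named operations in one document: operation_name picks which one runs.
    result = schema.execute(
        """
        query FetchUser { user { name } }
        query TypeOnly { __typename }
        """,
        operation_name="FetchUser",
    )
    assert result.data == {"user": {"name": "Ada"}}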

docs/execution/fileuploading.rst

@ -4,5 +4,5 @@ File uploading
File uploading is not part of the official GraphQL spec yet and is not natively
implemented in Graphene.
If your server needs to support file uploading then you can use the libary: `graphene-file-upload <https://github.com/lmcgartland/graphene-file-upload>`_ which enhances Graphene to add file
If your server needs to support file uploading then you can use the library: `graphene-file-upload <https://github.com/lmcgartland/graphene-file-upload>`_ which enhances Graphene to add file
uploads and conforms to the unofficial GraphQL `multipart request spec <https://github.com/jaydenseric/graphql-multipart-request-spec>`_.

docs/execution/index.rst

@ -10,3 +10,4 @@ Execution
dataloader
fileuploading
subscriptions
queryvalidation

docs/execution/middleware.rst

@ -46,7 +46,7 @@ Functional example
------------------
Middleware can also be defined as a function. Here we define a middleware that
logs the time it takes to resolve each field
logs the time it takes to resolve each field:
.. code:: python

docs/execution/queryvalidation.rst (new file, 123 lines)

@ -0,0 +1,123 @@
Query Validation
================
GraphQL uses query validators to check whether a query AST is valid and can be executed. Every GraphQL server implements
standard query validators. For example, there is a validator that tests whether a queried field exists on the queried type,
failing the query with a "Cannot query field on type" error when it does not.
To help with common use cases, graphene provides a few validation rules out of the box.
Depth limit Validator
---------------------
The depth limit validator helps prevent execution of malicious
queries. It takes the following arguments:

- ``max_depth`` is the maximum allowed depth for any operation in a GraphQL document.
- ``ignore`` stops recursive depth checking based on a field name. Either a string or regexp to match the name, or a function that returns a boolean.
- ``callback`` is called each time validation runs. Receives an Object which is a map of the depths for each operation.
Usage
-------
Here is how you would implement depth-limiting on your schema.
.. code:: python
    from graphql import validate, parse
    from graphene import ObjectType, Schema, String
    from graphene.validation import depth_limit_validator


    class MyQuery(ObjectType):
        name = String(required=True)


    schema = Schema(query=MyQuery)

    # queries which have a depth more than 20
    # will not be executed.
    validation_errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse('THE QUERY'),
        rules=(
            depth_limit_validator(
                max_depth=20
            ),
        )
    )
Disable Introspection
---------------------
The disable introspection validation rule ensures that your schema cannot be introspected.
This is a useful security measure in production environments.
Usage
-------
Here is how you would disable introspection for your schema.
.. code:: python
    from graphql import validate, parse
    from graphene import ObjectType, Schema, String
    from graphene.validation import DisableIntrospection


    class MyQuery(ObjectType):
        name = String(required=True)


    schema = Schema(query=MyQuery)

    # introspection queries will not be executed.
    validation_errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse('THE QUERY'),
        rules=(
            DisableIntrospection,
        )
    )
Implementing custom validators
------------------------------
All custom query validators should extend the `ValidationRule <https://github.com/graphql-python/graphql-core/blob/v3.0.5/src/graphql/validation/rules/__init__.py#L37>`_
base class, importable from the ``graphql.validation.rules`` module. Query validators are visitor classes: they are
instantiated at query-validation time with one required argument (``context: ASTValidationContext``). To
perform validation, your validator class should define one or more ``enter_*`` and ``leave_*`` methods. For possible
enter/leave items, as well as details on function documentation, see the contents of the visitor module. To make
validation fail, call the validator's ``report_error`` method with a ``GraphQLError`` instance describing the failure
reason. Here is an example query validator that visits field definitions in a GraphQL query and fails query validation
if any of those fields are blacklisted:
.. code:: python
    from graphql import GraphQLError
    from graphql.language import FieldNode
    from graphql.validation import ValidationRule


    my_blacklist = (
        "disallowed_field",
    )


    def is_blacklisted_field(field_name: str):
        return field_name.lower() in my_blacklist


    class BlackListRule(ValidationRule):
        def enter_field(self, node: FieldNode, *_args):
            field_name = node.name.value
            if not is_blacklisted_field(field_name):
                return

            self.report_error(
                GraphQLError(
                    f"Cannot query '{field_name}': field is blacklisted.", node,
                )
            )
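Reusing the ``schema`` and ``BlackListRule`` from the snippets above, the custom rule runs through the same ``validate`` call shown earlier on this page:

.. code:: python

    from graphql import parse, validate

    validation_errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse("{ disallowed_field }"),
        rules=(BlackListRule,),
    )
    assert validation_errors
    assert "blacklisted" in validation_errors[0].message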

docs/quickstart.rst

@ -60,14 +60,14 @@ Requirements
~~~~~~~~~~~~
- Python (2.7, 3.4, 3.5, 3.6, pypy)
- Graphene (2.0)
- Graphene (3.0)
Project setup
~~~~~~~~~~~~~
.. code:: bash
pip install "graphene>=2.0"
pip install "graphene>=3.0"
Creating a basic Schema
~~~~~~~~~~~~~~~~~~~~~~~

docs/relay/index.rst

@ -19,10 +19,8 @@ Useful links
- `Getting started with Relay`_
- `Relay Global Identification Specification`_
- `Relay Cursor Connection Specification`_
- `Relay input Object Mutation`_
.. _Relay: https://facebook.github.io/relay/docs/en/graphql-server-specification.html
.. _Relay specification: https://facebook.github.io/relay/graphql/objectidentification.htm#sec-Node-root-field
.. _Getting started with Relay: https://facebook.github.io/relay/docs/en/quick-start-guide.html
.. _Relay Global Identification Specification: https://facebook.github.io/relay/graphql/objectidentification.htm
.. _Relay Cursor Connection Specification: https://facebook.github.io/relay/graphql/connections.htm
.. _Relay: https://relay.dev/docs/guides/graphql-server-specification/
.. _Getting started with Relay: https://relay.dev/docs/getting-started/step-by-step-guide/
.. _Relay Global Identification Specification: https://relay.dev/graphql/objectidentification.htm
.. _Relay Cursor Connection Specification: https://relay.dev/graphql/connections.htm

docs/testing/index.rst

@ -77,13 +77,13 @@ Snapshot testing
As our APIs evolve, we need to know when our changes introduce any breaking changes that might break
some of the clients of our GraphQL app.
However, writing tests and replicate the same response we expect from our GraphQL application can be
However, writing tests and replicating the same response we expect from our GraphQL application can be a
tedious and repetitive task, and sometimes it's easier to skip this process.
Because of that, we recommend the usage of `SnapshotTest <https://github.com/syrusakbary/snapshottest/>`_.
SnapshotTest let us write all this tests in a breeze, as creates automatically the ``snapshots`` for us
the first time the test is executed.
SnapshotTest lets us write all these tests in a breeze, as it automatically creates the ``snapshots`` for us
the first time the tests are executed.
Here is a simple example of how our tests will look if we use ``pytest``:
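A minimal sketch of that pattern, assuming ``snapshottest``'s pytest plugin (which provides the ``snapshot`` fixture); the schema import is hypothetical:

.. code:: python

    from graphene.test import Client
    from myapp.schema import schema  # hypothetical import

    def test_hello(snapshot):
        client = Client(schema)
        # The first run records the snapshot; later runs compare against it.
        snapshot.assert_match(client.execute("{ hello }"))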

docs/types/interfaces.rst

@ -44,7 +44,7 @@ Both of these types have all of the fields from the ``Character`` interface,
but also bring in extra fields, ``home_planet``, ``starships`` and
``primary_function``, that are specific to that particular type of character.
The full GraphQL schema defition will look like this:
The full GraphQL schema definition will look like this:
.. code::

docs/types/mutations.rst

@ -85,9 +85,9 @@ We should receive:
InputFields and InputObjectTypes
----------------------------------
InputFields are used in mutations to allow nested input data for mutations
InputFields are used in mutations to allow nested input data for mutations.
To use an InputField you define an InputObjectType that specifies the structure of your input data
To use an InputField you define an InputObjectType that specifies the structure of your input data:
.. code:: python
@ -112,7 +112,7 @@ To use an InputField you define an InputObjectType that specifies the structure
return CreatePerson(person=person)
Note that **name** and **age** are part of **person_data** now
Note that **name** and **age** are part of **person_data** now.
Using the above mutation your new query would look like this:
@ -128,7 +128,7 @@ Using the above mutation your new query would look like this:
}
InputObjectTypes can also be fields of InputObjectTypes allowing you to have
as complex of input data as you need
as complex of input data as you need:
.. code:: python
@ -160,7 +160,7 @@ To return an existing ObjectType instead of a mutation-specific type, set the **
def mutate(root, info, name):
return Person(name=name)
Then, if we query (``schema.execute(query_str)``) the following:
Then, if we query (``schema.execute(query_str)``) with the following:
.. code::

View File

@ -44,7 +44,7 @@ There are some cases where the schema cannot access all of the types that we pla
For example, when a field returns an ``Interface``, the schema doesn't know about any of the
implementations.
In this case, we need to use the ``types`` argument when creating the Schema.
In this case, we need to use the ``types`` argument when creating the Schema:
.. code:: python
@ -63,7 +63,7 @@ By default all field and argument names (that are not
explicitly set with the ``name`` arg) will be converted from
``snake_case`` to ``camelCase`` (as the API is usually being consumed by a js/mobile client)
For example with the ObjectType
For example with the ObjectType the ``last_name`` field name is converted to ``lastName``:
.. code:: python
@ -71,12 +71,10 @@ For example with the ObjectType
last_name = graphene.String()
other_name = graphene.String(name='_other_Name')
the ``last_name`` field name is converted to ``lastName``.
In case you don't want to apply this transformation, provide a ``name`` argument to the field constructor.
``other_name`` converts to ``_other_Name`` (without further transformations).
Your query should look like
Your query should look like:
.. code::
@ -86,7 +84,7 @@ Your query should look like
}
To disable this behavior, set the ``auto_camelcase`` to ``False`` upon schema instantiation.
To disable this behavior, set the ``auto_camelcase`` to ``False`` upon schema instantiation:
.. code:: python

docs/types/unions.rst

@ -7,7 +7,7 @@ to specify any common fields between the types.
The basics:
- Each Union is a Python class that inherits from ``graphene.Union``.
- Unions don't have any fields on it, just links to the possible objecttypes.
- Unions don't have any fields on them, just links to the possible ObjectTypes.
Quick example
-------------

examples/starwars_relay/snapshots/snap_test_objectidentification.py

@ -49,7 +49,7 @@ type Faction implements Node {
name: String
"""The ships used by the faction."""
ships(before: String = null, after: String = null, first: Int = null, last: Int = null): ShipConnection
ships(before: String, after: String, first: Int, last: Int): ShipConnection
}
"""An object with an ID"""
@ -115,5 +115,4 @@ input IntroduceShipInput {
shipName: String!
factionId: String!
clientMutationId: String
}
'''
}'''

examples/starwars_relay/tests/test_objectidentification.py

@ -9,7 +9,7 @@ client = Client(schema)
def test_str_schema(snapshot):
snapshot.assert_match(str(schema))
snapshot.assert_match(str(schema).strip())
def test_correctly_fetches_id_name_rebels(snapshot):

graphene/__init__.py

@ -41,7 +41,7 @@ from .types import (
from .utils.module_loading import lazy_import
from .utils.resolve_only_args import resolve_only_args
VERSION = (3, 0, 0, "beta", 7)
VERSION = (3, 1, 0, "final", 0)
__version__ = get_version(VERSION)

graphene/pyutils/dataclasses.py

@ -291,14 +291,7 @@ class Field:
class _DataclassParams:
__slots__ = (
"init",
"repr",
"eq",
"order",
"unsafe_hash",
"frozen",
)
__slots__ = ("init", "repr", "eq", "order", "unsafe_hash", "frozen")
def __init__(self, init, repr, eq, order, unsafe_hash, frozen):
self.init = init
@ -442,13 +435,11 @@ def _field_init(f, frozen, globals, self_name):
# This field does not need initialization. Signify that
# to the caller by returning None.
return None
# Only test this now, so that we can create variables for the
# default. However, return None to signify that we're not going
# to actually do the assignment statement for InitVars.
if f._field_type == _FIELD_INITVAR:
return None
# Now, actually generate the field assignment.
return _field_assign(frozen, f.name, value, self_name)
@ -490,7 +481,6 @@ def _init_fn(fields, frozen, has_post_init, self_name):
raise TypeError(
f"non-default argument {f.name!r} " "follows default argument"
)
globals = {"MISSING": MISSING, "_HAS_DEFAULT_FACTORY": _HAS_DEFAULT_FACTORY}
body_lines = []
@ -500,16 +490,13 @@ def _init_fn(fields, frozen, has_post_init, self_name):
# initialization (it's a pseudo-field). Just skip it.
if line:
body_lines.append(line)
# Does this class have a post-init function?
if has_post_init:
params_str = ",".join(f.name for f in fields if f._field_type is _FIELD_INITVAR)
body_lines.append(f"{self_name}.{_POST_INIT_NAME}({params_str})")
# If no body lines, use 'pass'.
if not body_lines:
body_lines = ["pass"]
locals = {f"_type_{f.name}": f.type for f in fields}
return _create_fn(
"__init__",
@ -674,7 +661,6 @@ def _get_field(cls, a_name, a_type):
# This is a field in __slots__, so it has no default value.
default = MISSING
f = field(default=default)
# Only at this point do we know the name and the type. Set them.
f.name = a_name
f.type = a_type
@ -705,7 +691,6 @@ def _get_field(cls, a_name, a_type):
and _is_type(f.type, cls, typing, typing.ClassVar, _is_classvar)
):
f._field_type = _FIELD_CLASSVAR
# If the type is InitVar, or if it's a matching string annotation,
# then it's an InitVar.
if f._field_type is _FIELD:
@ -717,7 +702,6 @@ def _get_field(cls, a_name, a_type):
and _is_type(f.type, cls, dataclasses, dataclasses.InitVar, _is_initvar)
):
f._field_type = _FIELD_INITVAR
# Validations for individual fields. This is delayed until now,
# instead of in the Field() constructor, since only here do we
# know the field name, which allows for better error reporting.
@ -731,14 +715,12 @@ def _get_field(cls, a_name, a_type):
# example, how about init=False (or really,
# init=<not-the-default-init-value>)? It makes no sense for
# ClassVar and InitVar to specify init=<anything>.
# For real fields, disallow mutable defaults for known types.
if f._field_type is _FIELD and isinstance(f.default, (list, dict, set)):
raise ValueError(
f"mutable default {type(f.default)} for field "
f"{f.name} is not allowed: use default_factory"
)
return f
@ -827,7 +809,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
fields[f.name] = f
if getattr(b, _PARAMS).frozen:
any_frozen_base = True
# Annotations that are defined in this class (not in base
# classes). If __annotations__ isn't present, then this class
# adds no new annotations. We use this to compute fields that are
@ -866,22 +847,18 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
delattr(cls, f.name)
else:
setattr(cls, f.name, f.default)
# Do we have any Field members that don't also have annotations?
for name, value in cls.__dict__.items():
if isinstance(value, Field) and not name in cls_annotations:
raise TypeError(f"{name!r} is a field but has no type annotation")
# Check rules that apply if we are derived from any dataclasses.
if has_dataclass_bases:
# Raise an exception if any of our bases are frozen, but we're not.
if any_frozen_base and not frozen:
raise TypeError("cannot inherit non-frozen dataclass from a " "frozen one")
# Raise an exception if we're frozen, but none of our bases are.
if not any_frozen_base and frozen:
raise TypeError("cannot inherit frozen dataclass from a " "non-frozen one")
# Remember all of the fields on our class (including bases). This
# also marks this class as being a dataclass.
setattr(cls, _FIELDS, fields)
@ -900,7 +877,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
# eq methods.
if order and not eq:
raise ValueError("eq must be true if order is true")
if init:
# Does this class have a post-init function?
has_post_init = hasattr(cls, _POST_INIT_NAME)
@ -920,7 +896,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
"__dataclass_self__" if "self" in fields else "self",
),
)
# Get the fields as a list, and include only real fields. This is
# used in all of the following methods.
field_list = [f for f in fields.values() if f._field_type is _FIELD]
@ -928,7 +903,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
if repr:
flds = [f for f in field_list if f.repr]
_set_new_attribute(cls, "__repr__", _repr_fn(flds))
if eq:
# Create __eq__ method. There's no need for a __ne__ method,
# since python will call __eq__ and negate it.
@ -938,7 +912,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
_set_new_attribute(
cls, "__eq__", _cmp_fn("__eq__", "==", self_tuple, other_tuple)
)
if order:
# Create and set the ordering methods.
flds = [f for f in field_list if f.compare]
@ -958,7 +931,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
f"in class {cls.__name__}. Consider using "
"functools.total_ordering"
)
if frozen:
for fn in _frozen_get_del_attr(cls, field_list):
if _set_new_attribute(cls, fn.__name__, fn):
@ -966,7 +938,6 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
f"Cannot overwrite attribute {fn.__name__} "
f"in class {cls.__name__}"
)
# Decide if/how we're going to create a hash function.
hash_action = _hash_action[
bool(unsafe_hash), bool(eq), bool(frozen), has_explicit_hash
@ -975,11 +946,9 @@ def _process_class(cls, init, repr, eq, order, unsafe_hash, frozen):
# No need to call _set_new_attribute here, since by the time
# we're here the overwriting is unconditional.
cls.__hash__ = hash_action(cls, field_list)
if not getattr(cls, "__doc__"):
# Create a class doc-string.
cls.__doc__ = cls.__name__ + str(inspect.signature(cls)).replace(" -> None", "")
return cls
@ -1015,7 +984,6 @@ def dataclass(
if _cls is None:
# We're called with parens.
return wrap
# We're called as @dataclass without parens.
return wrap(_cls)
@ -1032,7 +1000,6 @@ def fields(class_or_instance):
fields = getattr(class_or_instance, _FIELDS)
except AttributeError:
raise TypeError("must be called with a dataclass type or instance")
# Exclude pseudo-fields. Note that fields is sorted by insertion
# order, so the order of the tuple is as the fields were defined.
return tuple(f for f in fields.values() if f._field_type is _FIELD)
@ -1174,7 +1141,6 @@ def make_dataclass(
else:
# Copy namespace since we're going to mutate it.
namespace = namespace.copy()
# While we're looking through the field names, validate that they
# are identifiers, are not keywords, and not duplicates.
seen = set()
@ -1184,23 +1150,20 @@ def make_dataclass(
name = item
tp = "typing.Any"
elif len(item) == 2:
name, tp, = item
(name, tp) = item
elif len(item) == 3:
name, tp, spec = item
namespace[name] = spec
else:
raise TypeError(f"Invalid field: {item!r}")
if not isinstance(name, str) or not name.isidentifier():
raise TypeError(f"Field names must be valid identifers: {name!r}")
if keyword.iskeyword(name):
raise TypeError(f"Field names must not be keywords: {name!r}")
if name in seen:
raise TypeError(f"Field name duplicated: {name!r}")
seen.add(name)
anns[name] = tp
namespace["__annotations__"] = anns
# We use `types.new_class()` instead of simply `type()` to allow dynamic creation
# of generic dataclassses.
@ -1229,14 +1192,13 @@ def replace(obj, **changes):
c = C(1, 2)
c1 = replace(c, x=3)
assert c1.x == 3 and c1.y == 2
"""
"""
# We're going to mutate 'changes', but that's okay because it's a
# new dict, even if called with 'replace(obj, **my_changes)'.
if not _is_dataclass_instance(obj):
raise TypeError("replace() should be called on dataclass instances")
# It's an error to have init=False fields in 'changes'.
# If a field is not in 'changes', read its value from the provided obj.
@ -1250,10 +1212,8 @@ def replace(obj, **changes):
"replace()"
)
continue
if f.name not in changes:
changes[f.name] = getattr(obj, f.name)
# Create the new object, which calls __init__() and
# __post_init__() (if defined), using all of the init fields we've
# added and/or left in 'changes'. If there are values supplied in

graphene/pyutils/version.py

@ -19,10 +19,7 @@ def get_version(version=None):
sub = ""
if version[3] == "alpha" and version[4] == 0:
git_changeset = get_git_changeset()
if git_changeset:
sub = ".dev%s" % git_changeset
else:
sub = ".dev"
sub = ".dev%s" % git_changeset if git_changeset else ".dev"
elif version[3] != "final":
mapping = {"alpha": "a", "beta": "b", "rc": "rc"}
sub = mapping[version[3]] + str(version[4])

graphene/relay/node.py

@ -18,11 +18,7 @@ def is_node(objecttype):
if not issubclass(objecttype, ObjectType):
return False
for i in objecttype._meta.interfaces:
if issubclass(i, Node):
return True
return False
return any(issubclass(i, Node) for i in objecttype._meta.interfaces)
class GlobalID(Field):
@ -90,13 +86,13 @@ class Node(AbstractNode):
def get_node_from_global_id(cls, info, global_id, only_type=None):
try:
_type, _id = cls.from_global_id(global_id)
if not _type:
raise ValueError("Invalid Global ID")
except Exception as e:
raise Exception(
(
f'Unable to parse global ID "{global_id}". '
'Make sure it is a base64 encoded string in the format: "TypeName:id". '
f"Exception message: {str(e)}"
)
f'Unable to parse global ID "{global_id}". '
'Make sure it is a base64 encoded string in the format: "TypeName:id". '
f"Exception message: {e}"
)
graphene_type = info.schema.get_type(_type)

graphene/relay/tests/test_connection_async.py

@ -51,10 +51,10 @@ letters = {letter: Letter(id=i, letter=letter) for i, letter in enumerate(letter
def edges(selected_letters):
return [
{
"node": {"id": base64("Letter:%s" % l.id), "letter": l.letter},
"cursor": base64("arrayconnection:%s" % l.id),
"node": {"id": base64("Letter:%s" % letter.id), "letter": letter.letter},
"cursor": base64("arrayconnection:%s" % letter.id),
}
for l in [letters[i] for i in selected_letters]
for letter in [letters[i] for i in selected_letters]
]

graphene/relay/tests/test_connection_query.py

@ -51,10 +51,10 @@ letters = {letter: Letter(id=i, letter=letter) for i, letter in enumerate(letter
def edges(selected_letters):
return [
{
"node": {"id": base64("Letter:%s" % l.id), "letter": l.letter},
"cursor": base64("arrayconnection:%s" % l.id),
"node": {"id": base64("Letter:%s" % letter.id), "letter": letter.letter},
"cursor": base64("arrayconnection:%s" % letter.id),
}
for l in [letters[i] for i in selected_letters]
for letter in [letters[i] for i in selected_letters]
]
@ -66,7 +66,6 @@ def cursor_for(ltr):
async def execute(args=""):
if args:
args = "(" + args + ")"
return await schema.execute_async(
"""
{
@ -164,14 +163,14 @@ async def test_respects_first_and_after_and_before_too_few():
@mark.asyncio
async def test_respects_first_and_after_and_before_too_many():
await check(
f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD",
f'first: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
)
@mark.asyncio
async def test_respects_first_and_after_and_before_exactly_right():
await check(
f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD",
f'first: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
)
@ -187,14 +186,14 @@ async def test_respects_last_and_after_and_before_too_few():
@mark.asyncio
async def test_respects_last_and_after_and_before_too_many():
await check(
f'last: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD",
f'last: 4, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
)
@mark.asyncio
async def test_respects_last_and_after_and_before_exactly_right():
await check(
f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD",
f'last: 3, after: "{cursor_for("A")}", before: "{cursor_for("E")}"', "BCD"
)

graphene/relay/tests/test_node.py

@ -1,7 +1,7 @@
import re
from graphql_relay import to_global_id
from textwrap import dedent
from graphene.tests.utils import dedent
from graphql_relay import to_global_id
from ...types import ObjectType, Schema, String
from ..node import Node, is_node
@ -171,8 +171,10 @@ def test_node_field_only_lazy_type_wrong():
def test_str_schema():
assert str(schema) == dedent(
'''
assert (
str(schema).strip()
== dedent(
'''
schema {
query: RootQuery
}
@ -213,4 +215,5 @@ def test_str_schema():
): MyNode
}
'''
).strip()
)

graphene/relay/tests/test_node_custom.py

@ -1,6 +1,6 @@
from graphql import graphql_sync
from textwrap import dedent
from graphene.tests.utils import dedent
from graphql import graphql_sync
from ...types import Interface, ObjectType, Schema
from ...types.scalars import Int, String
@ -54,8 +54,10 @@ graphql_schema = schema.graphql_schema
def test_str_schema_correct():
assert str(schema) == dedent(
'''
assert (
str(schema).strip()
== dedent(
'''
schema {
query: RootQuery
}
@ -93,6 +95,7 @@ def test_str_schema_correct():
): Node
}
'''
).strip()
)

graphene/test/__init__.py

@ -6,7 +6,7 @@ from graphene.types.schema import Schema
def default_format_error(error):
if isinstance(error, GraphQLError):
return format_graphql_error(error)
return error.formatted
return {"message": str(error)}


@ -0,0 +1,36 @@
from ...types import ObjectType, Schema, String, NonNull


class Query(ObjectType):
    hello = String(input=NonNull(String))

    def resolve_hello(self, info, input):
        if input == "nothing":
            return None
        return f"Hello {input}!"


schema = Schema(query=Query)


def test_required_input_provided():
    """
    Test that a required argument works when provided.
    """
    input_value = "Potato"
    result = schema.execute('{ hello(input: "%s") }' % input_value)
    assert not result.errors
    assert result.data == {"hello": "Hello Potato!"}


def test_required_input_missing():
    """
    Test that a required argument raises an error if not provided.
    """
    result = schema.execute("{ hello }")
    assert result.errors
    assert len(result.errors) == 1
    assert (
        result.errors[0].message
        == "Field 'hello' argument 'input' of type 'String!' is required, but it was not provided."
    )


@ -0,0 +1,53 @@
import pytest

from ...types.base64 import Base64
from ...types.datetime import Date, DateTime
from ...types.decimal import Decimal
from ...types.generic import GenericScalar
from ...types.json import JSONString
from ...types.objecttype import ObjectType
from ...types.scalars import ID, BigInt, Boolean, Float, Int, String
from ...types.schema import Schema
from ...types.uuid import UUID


@pytest.mark.parametrize(
    "input_type,input_value",
    [
        (Date, '"2022-02-02"'),
        (GenericScalar, '"foo"'),
        (Int, "1"),
        (BigInt, "12345678901234567890"),
        (Float, "1.1"),
        (String, '"foo"'),
        (Boolean, "true"),
        (ID, "1"),
        (DateTime, '"2022-02-02T11:11:11"'),
        (UUID, '"cbebbc62-758e-4f75-a890-bc73b5017d81"'),
        (Decimal, "1.1"),
        (JSONString, '{key:"foo",value:"bar"}'),
        (Base64, '"Q2hlbG8gd29ycmxkCg=="'),
    ],
)
def test_parse_literal_with_variables(input_type, input_value):
    # input_b needs to be evaluated as literal while the variable dict for
    # input_a is passed along.
    class Query(ObjectType):
        generic = GenericScalar(input_a=GenericScalar(), input_b=input_type())

        def resolve_generic(self, info, input_a=None, input_b=None):
            return input

    schema = Schema(query=Query)
    query = f"""
        query Test($a: GenericScalar){{
            generic(inputA: $a, inputB: {input_value})
        }}
    """
    result = schema.execute(
        query,
        variables={"a": "bar"},
    )
    assert not result.errors


@ -1,9 +0,0 @@
from textwrap import dedent as _dedent


def dedent(text: str) -> str:
    """Fix indentation of given text by removing leading spaces and tabs.

    Also removes leading newlines and trailing spaces and tabs, but keeps trailing
    newlines.
    """
    return _dedent(text.lstrip("\n").rstrip(" \t"))

graphene/types/argument.py

@ -1,4 +1,5 @@
from itertools import chain
from graphql import Undefined
from .dynamic import Dynamic
from .mountedtype import MountedType
@ -41,7 +42,7 @@ class Argument(MountedType):
def __init__(
self,
type_,
default_value=None,
default_value=Undefined,
description=None,
name=None,
required=False,
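The switch from ``None`` to graphql-core's ``Undefined`` matters because it lets a resolver tell an omitted argument apart from an explicit ``null``. A hedged sketch of the behavior this enables (illustrative, not code from the commit):

```python
from graphql import Undefined
from graphene import ObjectType, Schema, String

class Query(ObjectType):
    greet = String(who=String())

    def resolve_greet(root, info, who=Undefined):
        # When the argument is omitted, the Python default survives;
        # an explicit null arrives as None.
        if who is Undefined:
            return "no who argument supplied"
        if who is None:
            return "who was explicitly null"
        return f"Hello {who}!"

schema = Schema(query=Query)
# { greet }              -> "no who argument supplied"
# { greet(who: null) }   -> "who was explicitly null"
# { greet(who: "Bob") }  -> "Hello Bob!"
```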

graphene/types/base64.py

@ -22,7 +22,7 @@ class Base64(Scalar):
return b64encode(value).decode("utf-8")
@classmethod
def parse_literal(cls, node):
def parse_literal(cls, node, _variables=None):
if not isinstance(node, StringValueNode):
raise GraphQLError(
f"Base64 cannot represent non-string value: {print_ast(node)}"

graphene/types/decimal.py

@ -22,7 +22,7 @@ class Decimal(Scalar):
return str(dec)
@classmethod
def parse_literal(cls, node):
def parse_literal(cls, node, _variables=None):
if isinstance(node, (StringValueNode, IntValueNode)):
return cls.parse_value(node.value)

graphene/types/definitions.py

@ -7,7 +7,6 @@ from graphql import (
GraphQLObjectType,
GraphQLScalarType,
GraphQLUnionType,
Undefined,
)
@ -50,7 +49,7 @@ class GrapheneEnumType(GrapheneGraphQLType, GraphQLEnumType):
try:
value = enum[value]
except KeyError:
return Undefined
pass
return super(GrapheneEnumType, self).serialize(value)

graphene/types/enum.py

@ -52,7 +52,10 @@ class EnumMeta(SubclassWithMeta_Meta):
return super(EnumMeta, cls).__call__(*args, **kwargs)
# return cls._meta.enum(*args, **kwargs)
def from_enum(cls, enum, description=None, deprecation_reason=None): # noqa: N805
def from_enum(
cls, enum, name=None, description=None, deprecation_reason=None
): # noqa: N805
name = name or enum.__name__
description = description or enum.__doc__
meta_dict = {
"enum": enum,
@ -60,7 +63,7 @@ class EnumMeta(SubclassWithMeta_Meta):
"deprecation_reason": deprecation_reason,
}
meta_class = type("Meta", (object,), meta_dict)
return type(meta_class.enum.__name__, (Enum,), {"Meta": meta_class})
return type(name, (Enum,), {"Meta": meta_class})
class Enum(UnmountedType, BaseType, metaclass=EnumMeta):
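The new ``name`` keyword lets the same Python enum be exposed under two distinct GraphQL type names; a small usage sketch (mirroring the test added further down):

```python
from enum import Enum as PyEnum
from graphene import Enum

class Color(PyEnum):
    RED = 1
    GREEN = 2

# Without name=, both mappings would be registered as "Color".
GColor = Enum.from_enum(Color, description="original colors")
UniqueGColor = Enum.from_enum(Color, name="UniqueColor", description="unique colors")
```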

graphene/types/generic.py

@ -29,7 +29,7 @@ class GenericScalar(Scalar):
parse_value = identity
@staticmethod
def parse_literal(ast):
def parse_literal(ast, _variables=None):
if isinstance(ast, (StringValueNode, BooleanValueNode)):
return ast.value
elif isinstance(ast, IntValueNode):

graphene/types/json.py

@ -20,7 +20,7 @@ class JSONString(Scalar):
return json.dumps(dt)
@staticmethod
def parse_literal(node):
def parse_literal(node, _variables=None):
if isinstance(node, StringValueNode):
return json.loads(node.value)

graphene/types/mutation.py

@ -76,7 +76,6 @@ class Mutation(ObjectType):
):
if not _meta:
_meta = MutationOptions(cls)
output = output or getattr(cls, "Output", None)
fields = {}
@ -85,43 +84,32 @@ class Mutation(ObjectType):
interface, Interface
), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".'
fields.update(interface._meta.fields)
if not output:
# If output is defined, we don't need to get the fields
fields = {}
for base in reversed(cls.__mro__):
fields.update(yank_fields_from_attrs(base.__dict__, _as=Field))
output = cls
if not arguments:
input_class = getattr(cls, "Arguments", None)
if not input_class:
input_class = getattr(cls, "Input", None)
if input_class:
warn_deprecation(
(
f"Please use {cls.__name__}.Arguments instead of {cls.__name__}.Input."
" Input is now only used in ClientMutationID.\n"
"Read more:"
" https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input"
)
f"Please use {cls.__name__}.Arguments instead of {cls.__name__}.Input."
" Input is now only used in ClientMutationID.\n"
"Read more:"
" https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input"
)
if input_class:
arguments = props(input_class)
else:
arguments = {}
arguments = props(input_class) if input_class else {}
if not resolver:
mutate = getattr(cls, "mutate", None)
assert mutate, "All mutations must define a mutate method in it"
resolver = get_unbound_function(mutate)
if _meta.fields:
_meta.fields.update(fields)
else:
_meta.fields = fields
_meta.interfaces = interfaces
_meta.output = output
_meta.resolver = resolver
@ -133,7 +121,7 @@ class Mutation(ObjectType):
def Field(
cls, name=None, description=None, deprecation_reason=None, required=False
):
""" Mount instance of mutation Field. """
"""Mount instance of mutation Field."""
return Field(
cls._meta.output,
args=cls._meta.arguments,

graphene/types/objecttype.py

@ -7,7 +7,6 @@ try:
from dataclasses import make_dataclass, field
except ImportError:
from ..pyutils.dataclasses import make_dataclass, field # type: ignore
# For static type checking with Mypy
MYPY = False
if MYPY:
@ -28,7 +27,7 @@ class ObjectTypeMeta(BaseTypeMeta):
pass
base_cls = super().__new__(
cls, name_, (InterObjectType,) + bases, namespace, **options,
cls, name_, (InterObjectType,) + bases, namespace, **options
)
if base_cls._meta:
fields = [
@ -66,7 +65,7 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta):
Methods starting with ``resolve_<field_name>`` are bound as resolvers of the matching Field
name. If no resolver is provided, the default resolver is used.
Ambiguous types with Interface and Union can be determined through``is_type_of`` method and
Ambiguous types with Interface and Union can be determined through ``is_type_of`` method and
``Meta.possible_types`` attribute.
.. code:: python
@ -133,7 +132,6 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta):
):
if not _meta:
_meta = ObjectTypeOptions(cls)
fields = {}
for interface in interfaces:
@ -141,10 +139,8 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta):
interface, Interface
), f'All interfaces of {cls.__name__} must be a subclass of Interface. Received "{interface}".'
fields.update(interface._meta.fields)
for base in reversed(cls.__mro__):
fields.update(yank_fields_from_attrs(base.__dict__, _as=Field))
assert not (possible_types and cls.is_type_of), (
f"{cls.__name__}.Meta.possible_types will cause type collision with {cls.__name__}.is_type_of. "
"Please use one or other."
@ -154,7 +150,6 @@ class ObjectType(BaseType, metaclass=ObjectTypeMeta):
_meta.fields.update(fields)
else:
_meta.fields = fields
if not _meta.interfaces:
_meta.interfaces = interfaces
_meta.possible_types = possible_types

graphene/types/resolver.py

@ -7,9 +7,7 @@ def dict_resolver(attname, default_value, root, info, **args):
def dict_or_attr_resolver(attname, default_value, root, info, **args):
resolver = attr_resolver
if isinstance(root, dict):
resolver = dict_resolver
resolver = dict_resolver if isinstance(root, dict) else attr_resolver
return resolver(attname, default_value, root, info, **args)

graphene/types/scalars.py

@ -75,7 +75,7 @@ class Int(Scalar):
parse_value = coerce_int
@staticmethod
def parse_literal(ast):
def parse_literal(ast, _variables=None):
if isinstance(ast, IntValueNode):
num = int(ast.value)
if MIN_INT <= num <= MAX_INT:
@ -104,7 +104,7 @@ class BigInt(Scalar):
parse_value = coerce_int
@staticmethod
def parse_literal(ast):
def parse_literal(ast, _variables=None):
if isinstance(ast, IntValueNode):
return int(ast.value)
@ -128,7 +128,7 @@ class Float(Scalar):
parse_value = coerce_float
@staticmethod
def parse_literal(ast):
def parse_literal(ast, _variables=None):
if isinstance(ast, (FloatValueNode, IntValueNode)):
return float(ast.value)
@ -143,14 +143,14 @@ class String(Scalar):
@staticmethod
def coerce_string(value):
if isinstance(value, bool):
return u"true" if value else u"false"
return "true" if value else "false"
return str(value)
serialize = coerce_string
parse_value = coerce_string
@staticmethod
def parse_literal(ast):
def parse_literal(ast, _variables=None):
if isinstance(ast, StringValueNode):
return ast.value
@ -164,7 +164,7 @@ class Boolean(Scalar):
parse_value = bool
@staticmethod
def parse_literal(ast):
def parse_literal(ast, _variables=None):
if isinstance(ast, BooleanValueNode):
return ast.value
@ -182,6 +182,6 @@ class ID(Scalar):
parse_value = str
@staticmethod
def parse_literal(ast):
def parse_literal(ast, _variables=None):
if isinstance(ast, (StringValueNode, IntValueNode)):
return ast.value

graphene/types/schema.py

@ -26,10 +26,7 @@ from graphql import (
GraphQLObjectType,
GraphQLSchema,
GraphQLString,
Undefined,
)
from graphql.execution import ExecutionContext
from graphql.execution.values import get_argument_values
from ..utils.str_converters import to_camel_case
from ..utils.get_unbound_function import get_unbound_function
@ -313,9 +310,7 @@ class TypeMap(dict):
arg_type,
out_name=arg_name,
description=arg.description,
default_value=Undefined
if isinstance(arg.type, NonNull)
else arg.default_value,
default_value=arg.default_value,
)
subscribe = field.wrap_subscribe(
self.get_function_for_type(
@ -381,123 +376,18 @@ class TypeMap(dict):
def resolve_type(self, resolve_type_func, type_name, root, info, _type):
type_ = resolve_type_func(root, info)
if not type_:
return_type = self[type_name]
return default_type_resolver(root, info, return_type)
if inspect.isclass(type_) and issubclass(type_, ObjectType):
graphql_type = self.get(type_._meta.name)
assert graphql_type, f"Can't find type {type_._meta.name} in schema"
assert (
graphql_type.graphene_type == type_
), f"The type {type_} does not match with the associated graphene type {graphql_type.graphene_type}."
return graphql_type
return type_._meta.name
return type_
class UnforgivingExecutionContext(ExecutionContext):
"""An execution context which doesn't swallow exceptions.
The only difference between this execution context and the one it inherits from is
that ``except Exception`` is commented out within ``resolve_field_value_or_error``.
By removing that exception handling, only ``GraphQLError``'s are caught.
"""
def resolve_field_value_or_error(
self, field_def, field_nodes, resolve_fn, source, info
):
"""Resolve field to a value or an error.
Isolates the "ReturnOrAbrupt" behavior to not de-opt the resolve_field()
method. Returns the result of resolveFn or the abrupt-return Error object.
For internal use only.
"""
try:
# Build a dictionary of arguments from the field.arguments AST, using the
# variables scope to fulfill any variable references.
args = get_argument_values(field_def, field_nodes[0], self.variable_values)
# Note that contrary to the JavaScript implementation, we pass the context
# value as part of the resolve info.
result = resolve_fn(source, info, **args)
if self.is_awaitable(result):
# noinspection PyShadowingNames
async def await_result():
try:
return await result
except GraphQLError as error:
return error
# except Exception as error:
# return GraphQLError(str(error), original_error=error)
# Yes, this is commented out code. It's been intentionally
# _not_ removed to show what has changed from the original
# implementation.
return await_result()
return result
except GraphQLError as error:
return error
# except Exception as error:
# return GraphQLError(str(error), original_error=error)
# Yes, this is commented out code. It's been intentionally _not_
# removed to show what has changed from the original implementation.
def complete_value_catching_error(
self, return_type, field_nodes, info, path, result
):
"""Complete a value while catching an error.
This is a small wrapper around completeValue which detects and logs errors in
the execution context.
"""
try:
if self.is_awaitable(result):
async def await_result():
value = self.complete_value(
return_type, field_nodes, info, path, await result
)
if self.is_awaitable(value):
return await value
return value
completed = await_result()
else:
completed = self.complete_value(
return_type, field_nodes, info, path, result
)
if self.is_awaitable(completed):
# noinspection PyShadowingNames
async def await_completed():
try:
return await completed
# CHANGE WAS MADE HERE
# ``GraphQLError`` was swapped in for ``except Exception``
except GraphQLError as error:
self.handle_field_error(error, field_nodes, path, return_type)
return await_completed()
return completed
# CHANGE WAS MADE HERE
# ``GraphQLError`` was swapped in for ``except Exception``
except GraphQLError as error:
self.handle_field_error(error, field_nodes, path, return_type)
return None
return_type = self[type_name]
return default_type_resolver(root, info, return_type)
class Schema:
"""Schema Definition.
A Graphene Schema can execute operations (query, mutation, subscription) against the defined
types. For advanced purposes, the schema can be used to lookup type definitions and answer
questions about the types through introspection.
Args:
query (Type[ObjectType]): Root query *ObjectType*. Describes entry point for fields to *read*
data in your Schema.
@ -544,7 +434,6 @@ class Schema:
"""
This function lets the developer select a type in a given schema
by accessing its attrs.
Example: using schema.Query for accessing the "Query" type in the Schema
"""
_type = self.graphql_schema.get_type(type_name)
@ -559,11 +448,9 @@ class Schema:
def execute(self, *args, **kwargs):
"""Execute a GraphQL query on the schema.
Use the `graphql_sync` function from `graphql-core` to provide the result
for a query string. Most of the time this method will be called by one of the Graphene
:ref:`Integrations` via a web request.
Args:
request_string (str or Document): GraphQL request (query, mutation or subscription)
as string or parsed AST form from `graphql-core`.
@ -580,7 +467,6 @@ class Schema:
defined in `graphql-core`.
execution_context_class (ExecutionContext, optional): The execution context class
to use when resolving queries and mutations.
Returns:
:obj:`ExecutionResult` containing any data and errors for the operation.
"""
@ -589,7 +475,6 @@ class Schema:
async def execute_async(self, *args, **kwargs):
"""Execute a GraphQL query on the schema asynchronously.
Same as `execute`, but uses `graphql` instead of `graphql_sync`.
"""
kwargs = normalize_execute_kwargs(kwargs)

graphene/types/tests/test_base64.py

@ -64,15 +64,11 @@ def test_base64_query_none():
def test_base64_query_invalid():
bad_inputs = [
dict(),
123,
"This is not valid base64",
]
bad_inputs = [dict(), 123, "This is not valid base64"]
for input_ in bad_inputs:
result = schema.execute(
"""{ base64(input: $input) }""", variables={"input": input_},
"""{ base64(input: $input) }""", variables={"input": input_}
)
assert isinstance(result.errors, list)
assert len(result.errors) == 1

View File

@ -26,8 +26,8 @@ def test_enum_construction():
assert RGB._meta.description == "Description"
values = RGB._meta.enum.__members__.values()
assert sorted([v.name for v in values]) == ["BLUE", "GREEN", "RED"]
assert sorted([v.description for v in values]) == [
assert sorted(v.name for v in values) == ["BLUE", "GREEN", "RED"]
assert sorted(v.description for v in values) == [
"Description BLUE",
"Description GREEN",
"Description RED",
@ -52,7 +52,7 @@ def test_enum_instance_construction():
RGB = Enum("RGB", "RED,GREEN,BLUE")
values = RGB._meta.enum.__members__.values()
assert sorted([v.name for v in values]) == ["BLUE", "GREEN", "RED"]
assert sorted(v.name for v in values) == ["BLUE", "GREEN", "RED"]
def test_enum_from_builtin_enum():
@ -251,19 +251,22 @@ def test_enum_types():
schema = Schema(query=Query)
assert str(schema) == dedent(
'''\
type Query {
color: Color!
}
assert (
str(schema).strip()
== dedent(
'''
type Query {
color: Color!
}
"""Primary colors"""
enum Color {
RED
YELLOW
BLUE
}
'''
"""Primary colors"""
enum Color {
RED
YELLOW
BLUE
}
'''
).strip()
)
@ -325,6 +328,52 @@ def test_enum_resolver_compat():
assert results.data["colorByName"] == Color.RED.name
def test_enum_with_name():
from enum import Enum as PyEnum
class Color(PyEnum):
RED = 1
YELLOW = 2
BLUE = 3
GColor = Enum.from_enum(Color, description="original colors")
UniqueGColor = Enum.from_enum(
Color, name="UniqueColor", description="unique colors"
)
class Query(ObjectType):
color = GColor(required=True)
unique_color = UniqueGColor(required=True)
schema = Schema(query=Query)
assert (
str(schema).strip()
== dedent(
'''
type Query {
color: Color!
uniqueColor: UniqueColor!
}
"""original colors"""
enum Color {
RED
YELLOW
BLUE
}
"""unique colors"""
enum UniqueColor {
RED
YELLOW
BLUE
}
'''
).strip()
)
def test_enum_resolver_invalid():
from enum import Enum as PyEnum
@ -345,10 +394,7 @@ def test_enum_resolver_invalid():
results = schema.execute("query { color }")
assert results.errors
assert (
results.errors[0].message
== "Expected a value of type 'Color' but received: 'BLACK'"
)
assert results.errors[0].message == "Enum 'Color' cannot represent value: 'BLACK'"
def test_field_enum_argument():
@ -460,12 +506,13 @@ def test_mutation_enum_input_type():
schema = Schema(query=Query, mutation=MyMutation)
result = schema.execute(
""" mutation MyMutation {
createPaint(colorInput: { color: RED }) {
color
"""
mutation MyMutation {
createPaint(colorInput: { color: RED }) {
color
}
}
}
""",
"""
)
assert not result.errors
assert result.data == {"createPaint": {"color": "RED"}}

View File

@ -191,21 +191,15 @@ def test_objecttype_as_container_all_kwargs():
def test_objecttype_as_container_extra_args():
with raises(TypeError) as excinfo:
Container("1", "2", "3")
assert "__init__() takes from 1 to 3 positional arguments but 4 were given" == str(
excinfo.value
)
msg = r"__init__\(\) takes from 1 to 3 positional arguments but 4 were given"
with raises(TypeError, match=msg):
Container("1", "2", "3") # type: ignore
def test_objecttype_as_container_invalid_kwargs():
with raises(TypeError) as excinfo:
Container(unexisting_field="3")
assert "__init__() got an unexpected keyword argument 'unexisting_field'" == str(
excinfo.value
)
msg = r"__init__\(\) got an unexpected keyword argument 'unexisting_field'"
with raises(TypeError, match=msg):
Container(unexisting_field="3") # type: ignore
def test_objecttype_container_benchmark(benchmark):

View File

@ -229,11 +229,11 @@ def test_query_arguments():
result = test_schema.execute("{ test }", None)
assert not result.errors
assert result.data == {"test": '[null,{"a_str":null,"a_int":null}]'}
assert result.data == {"test": "[null,{}]"}
result = test_schema.execute('{ test(aStr: "String!") }', "Source!")
assert not result.errors
assert result.data == {"test": '["Source!",{"a_str":"String!","a_int":null}]'}
assert result.data == {"test": '["Source!",{"a_str":"String!"}]'}
result = test_schema.execute('{ test(aInt: -123, aStr: "String!") }', "Source!")
assert not result.errors
@ -258,7 +258,7 @@ def test_query_input_field():
result = test_schema.execute("{ test }", None)
assert not result.errors
assert result.data == {"test": '[null,{"a_input":null}]'}
assert result.data == {"test": "[null,{}]"}
result = test_schema.execute('{ test(aInput: {aField: "String!"} ) }', "Source!")
assert not result.errors

View File

@ -11,19 +11,19 @@ def test_scalar():
def test_ints():
assert Int.parse_value(2 ** 31 - 1) is not None
assert Int.parse_value(2**31 - 1) is not None
assert Int.parse_value("2.0") is not None
assert Int.parse_value(2 ** 31) is None
assert Int.parse_value(2**31) is None
assert Int.parse_literal(IntValueNode(value=str(2 ** 31 - 1))) == 2 ** 31 - 1
assert Int.parse_literal(IntValueNode(value=str(2 ** 31))) is None
assert Int.parse_literal(IntValueNode(value=str(2**31 - 1))) == 2**31 - 1
assert Int.parse_literal(IntValueNode(value=str(2**31))) is None
assert Int.parse_value(-(2 ** 31)) is not None
assert Int.parse_value(-(2 ** 31) - 1) is None
assert Int.parse_value(-(2**31)) is not None
assert Int.parse_value(-(2**31) - 1) is None
assert BigInt.parse_value(2 ** 31) is not None
assert BigInt.parse_value(2**31) is not None
assert BigInt.parse_value("2.0") is not None
assert BigInt.parse_value(-(2 ** 31) - 1) is not None
assert BigInt.parse_value(-(2**31) - 1) is not None
assert BigInt.parse_literal(IntValueNode(value=str(2 ** 31 - 1))) == 2 ** 31 - 1
assert BigInt.parse_literal(IntValueNode(value=str(2 ** 31))) == 2 ** 31
assert BigInt.parse_literal(IntValueNode(value=str(2**31 - 1))) == 2**31 - 1
assert BigInt.parse_literal(IntValueNode(value=str(2**31))) == 2**31

View File

@ -38,7 +38,7 @@ def test_serializes_output_string():
assert String.serialize(-1.1) == "-1.1"
assert String.serialize(True) == "true"
assert String.serialize(False) == "false"
assert String.serialize(u"\U0001F601") == u"\U0001F601"
assert String.serialize("\U0001F601") == "\U0001F601"
def test_serializes_output_boolean():

View File

@ -1,13 +1,13 @@
from graphql.type import GraphQLObjectType, GraphQLSchema
from graphql import GraphQLError
from pytest import mark, raises, fixture
from textwrap import dedent
from graphene.tests.utils import dedent
from pytest import raises
from graphql.type import GraphQLObjectType, GraphQLSchema
from ..field import Field
from ..objecttype import ObjectType
from ..scalars import String
from ..schema import Schema, UnforgivingExecutionContext
from ..schema import Schema
class MyOtherType(ObjectType):
@ -44,8 +44,10 @@ def test_schema_get_type_error():
def test_schema_str():
schema = Schema(Query)
assert str(schema) == dedent(
"""
assert (
str(schema).strip()
== dedent(
"""
type Query {
inner: MyOtherType
}
@ -54,6 +56,7 @@ def test_schema_str():
field: String
}
"""
).strip()
)
@ -69,115 +72,3 @@ def test_schema_requires_query_type():
assert len(result.errors) == 1
error = result.errors[0]
assert error.message == "Query root type must be provided."
class TestUnforgivingExecutionContext:
@fixture
def schema(self):
class ErrorFieldsMixin:
sanity_field = String()
expected_error_field = String()
unexpected_value_error_field = String()
unexpected_type_error_field = String()
unexpected_attribute_error_field = String()
unexpected_key_error_field = String()
@staticmethod
def resolve_sanity_field(obj, info):
return "not an error"
@staticmethod
def resolve_expected_error_field(obj, info):
raise GraphQLError("expected error")
@staticmethod
def resolve_unexpected_value_error_field(obj, info):
raise ValueError("unexpected error")
@staticmethod
def resolve_unexpected_type_error_field(obj, info):
raise TypeError("unexpected error")
@staticmethod
def resolve_unexpected_attribute_error_field(obj, info):
raise AttributeError("unexpected error")
@staticmethod
def resolve_unexpected_key_error_field(obj, info):
return {}["fails"]
class NestedObject(ErrorFieldsMixin, ObjectType):
pass
class MyQuery(ErrorFieldsMixin, ObjectType):
nested_object = Field(NestedObject)
nested_object_error = Field(NestedObject)
@staticmethod
def resolve_nested_object(obj, info):
return object()
@staticmethod
def resolve_nested_object_error(obj, info):
raise TypeError()
schema = Schema(query=MyQuery)
return schema
def test_sanity_check(self, schema):
# this should pass with no errors (sanity check)
result = schema.execute(
"query { sanityField }",
execution_context_class=UnforgivingExecutionContext,
)
assert not result.errors
assert result.data == {"sanityField": "not an error"}
def test_nested_sanity_check(self, schema):
# this should pass with no errors (sanity check)
result = schema.execute(
r"query { nestedObject { sanityField } }",
execution_context_class=UnforgivingExecutionContext,
)
assert not result.errors
assert result.data == {"nestedObject": {"sanityField": "not an error"}}
def test_graphql_error(self, schema):
result = schema.execute(
"query { expectedErrorField }",
execution_context_class=UnforgivingExecutionContext,
)
assert len(result.errors) == 1
assert result.errors[0].message == "expected error"
assert result.data == {"expectedErrorField": None}
def test_nested_graphql_error(self, schema):
result = schema.execute(
r"query { nestedObject { expectedErrorField } }",
execution_context_class=UnforgivingExecutionContext,
)
assert len(result.errors) == 1
assert result.errors[0].message == "expected error"
assert result.data == {"nestedObject": {"expectedErrorField": None}}
@mark.parametrize(
"field,exception",
[
("unexpectedValueErrorField", ValueError),
("unexpectedTypeErrorField", TypeError),
("unexpectedAttributeErrorField", AttributeError),
("unexpectedKeyErrorField", KeyError),
("nestedObject { unexpectedValueErrorField }", ValueError),
("nestedObject { unexpectedTypeErrorField }", TypeError),
("nestedObject { unexpectedAttributeErrorField }", AttributeError),
("nestedObject { unexpectedKeyErrorField }", KeyError),
("nestedObjectError { __typename }", TypeError),
],
)
def test_unexpected_error(self, field, exception, schema):
with raises(exception):
# no result, but the exception should be propagated
schema.execute(
f"query {{ {field} }}",
execution_context_class=UnforgivingExecutionContext,
)

View File

@ -14,9 +14,7 @@ class Subscription(ObjectType):
count_to_ten = Field(Int)
async def subscribe_count_to_ten(root, info):
count = 0
while count < 10:
count += 1
for count in range(1, 11):
yield count
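A sketch of how such a subscription might be consumed, assuming `schema` is a Schema built around this Subscription type and exposes the async subscribe method used by graphene's asyncio tests:

    import asyncio

    async def consume():
        results = await schema.subscribe("subscription { countToTen }")
        async for result in results:
            print(result.data["countToTen"])  # 1, 2, ..., 10

    asyncio.run(consume())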

View File

@ -1,3 +1,4 @@
from graphql import Undefined
from graphql.type import (
GraphQLArgument,
GraphQLEnumType,
@ -6,6 +7,7 @@ from graphql.type import (
GraphQLInputField,
GraphQLInputObjectType,
GraphQLInterfaceType,
GraphQLNonNull,
GraphQLObjectType,
GraphQLString,
)
@ -94,6 +96,21 @@ def test_objecttype():
}
def test_required_argument_with_default_value():
class MyObjectType(ObjectType):
foo = String(bar=String(required=True, default_value="x"))
type_map = create_type_map([MyObjectType])
graphql_type = type_map["MyObjectType"]
foo_field = graphql_type.fields["foo"]
bar_argument = foo_field.args["bar"]
assert bar_argument.default_value == "x"
assert isinstance(bar_argument.type, GraphQLNonNull)
assert bar_argument.type.of_type == GraphQLString
def test_dynamic_objecttype():
class MyObjectType(ObjectType):
"""Description"""
@ -228,7 +245,9 @@ def test_objecttype_camelcase():
foo_field = fields["fooBar"]
assert isinstance(foo_field, GraphQLField)
assert foo_field.args == {
"barFoo": GraphQLArgument(GraphQLString, default_value=None, out_name="bar_foo")
"barFoo": GraphQLArgument(
GraphQLString, default_value=Undefined, out_name="bar_foo"
)
}
@ -251,7 +270,7 @@ def test_objecttype_camelcase_disabled():
assert isinstance(foo_field, GraphQLField)
assert foo_field.args == {
"bar_foo": GraphQLArgument(
GraphQLString, default_value=None, out_name="bar_foo"
GraphQLString, default_value=Undefined, out_name="bar_foo"
)
}

View File

@ -21,7 +21,7 @@ class Union(UnmountedType, BaseType):
to determine which type is actually used when the field is resolved.
The schema in this example can take a search text and return any of the GraphQL object types
indicated: Human, Droid or Startship.
indicated: Human, Droid or Starship.
Ambiguous return types can be resolved on each ObjectType through ``Meta.possible_types``
attribute or ``is_type_of`` method. Or by implementing ``resolve_type`` class method on the

View File

@ -21,7 +21,7 @@ class UUID(Scalar):
return str(uuid)
@staticmethod
def parse_literal(node):
def parse_literal(node, _variables=None):
if isinstance(node, StringValueNode):
return _UUID(node.value)

View File

@ -0,0 +1,6 @@
def is_introspection_key(key):
# from: https://spec.graphql.org/June2018/#sec-Schema
# > All types and directives defined within a schema must not have a name which
# > begins with "__" (two underscores), as this is used exclusively
# > by GraphQL's introspection system.
return str(key).startswith("__")
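A few illustrative checks:

    assert is_introspection_key("__schema")
    assert is_introspection_key("__typename")
    assert not is_introspection_key("user")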

View File

@ -27,19 +27,18 @@ def import_string(dotted_path, dotted_attributes=None):
if not dotted_attributes:
return result
else:
attributes = dotted_attributes.split(".")
traveled_attributes = []
try:
for attribute in attributes:
traveled_attributes.append(attribute)
result = getattr(result, attribute)
return result
except AttributeError:
raise ImportError(
'Module "%s" does not define a "%s" attribute inside attribute/class "%s"'
% (module_path, ".".join(traveled_attributes), class_name)
)
attributes = dotted_attributes.split(".")
traveled_attributes = []
try:
for attribute in attributes:
traveled_attributes.append(attribute)
result = getattr(result, attribute)
return result
except AttributeError:
raise ImportError(
'Module "%s" does not define a "%s" attribute inside attribute/class "%s"'
% (module_path, ".".join(traveled_attributes), class_name)
)
def lazy_import(dotted_path, dotted_attributes=None):

View File

@ -36,4 +36,4 @@ class OrderedType:
return NotImplemented
def __hash__(self):
return hash((self.creation_counter))
return hash(self.creation_counter)

View File

@ -94,6 +94,7 @@ TEST_DATA = {
],
"movies": {
"1198359": {
"id": "1198359",
"name": "King Arthur: Legend of the Sword",
"synopsis": (
"When the child Arthur's father is murdered, Vortigern, "
@ -159,7 +160,7 @@ def test_example_end_to_end():
"date": "2017-05-19",
"movie": {
"__typename": "Movie",
"id": "TW92aWU6Tm9uZQ==",
"id": "TW92aWU6MTE5ODM1OQ==",
"name": "King Arthur: Legend of the Sword",
"synopsis": (
"When the child Arthur's father is murdered, Vortigern, "
@ -172,7 +173,7 @@ def test_example_end_to_end():
"__typename": "Event",
"id": "RXZlbnQ6MjM0",
"date": "2017-05-20",
"movie": {"__typename": "Movie", "id": "TW92aWU6Tm9uZQ=="},
"movie": {"__typename": "Movie", "id": "TW92aWU6MTE5ODM1OQ=="},
},
]
}

View File

@ -38,4 +38,4 @@ def test_orderedtype_non_orderabletypes():
assert one.__lt__(1) == NotImplemented
assert one.__gt__(1) == NotImplemented
assert not one == 1
assert one != 1

View File

@ -0,0 +1,5 @@
from .depth_limit import depth_limit_validator
from .disable_introspection import DisableIntrospection
__all__ = ["DisableIntrospection", "depth_limit_validator"]

View File

@ -0,0 +1,195 @@
# This is a Python port of https://github.com/stems/graphql-depth-limit
# which is licensed under the terms of the MIT license, reproduced below.
#
# -----------
#
# MIT License
#
# Copyright (c) 2017 Stem
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.
try:
from re import Pattern
except ImportError:
# backwards compatibility for Python 3.6
from typing import Pattern
from typing import Callable, Dict, List, Optional, Union
from graphql import GraphQLError
from graphql.validation import ValidationContext, ValidationRule
from graphql.language import (
DefinitionNode,
FieldNode,
FragmentDefinitionNode,
FragmentSpreadNode,
InlineFragmentNode,
Node,
OperationDefinitionNode,
)
from ..utils.is_introspection_key import is_introspection_key
IgnoreType = Union[Callable[[str], bool], Pattern, str]
def depth_limit_validator(
max_depth: int,
ignore: Optional[List[IgnoreType]] = None,
callback: Optional[Callable[[Dict[str, int]], None]] = None,
):
class DepthLimitValidator(ValidationRule):
def __init__(self, validation_context: ValidationContext):
document = validation_context.document
definitions = document.definitions
fragments = get_fragments(definitions)
queries = get_queries_and_mutations(definitions)
query_depths = {}
for name in queries:
query_depths[name] = determine_depth(
node=queries[name],
fragments=fragments,
depth_so_far=0,
max_depth=max_depth,
context=validation_context,
operation_name=name,
ignore=ignore,
)
if callable(callback):
callback(query_depths)
super().__init__(validation_context)
return DepthLimitValidator
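A usage sketch, mirroring how the tests below wire the rule into graphql-core's validate (the query and the `schema` object are placeholders):

    from graphql import parse, validate

    document = parse("{ user { pets { owner { name } } } }")
    errors = validate(
        schema=schema.graphql_schema,
        document_ast=document,
        rules=(depth_limit_validator(max_depth=2),),
    )
    # errors is non-empty when any operation exceeds max_depth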
def get_fragments(
definitions: List[DefinitionNode],
) -> Dict[str, FragmentDefinitionNode]:
fragments = {}
for definition in definitions:
if isinstance(definition, FragmentDefinitionNode):
fragments[definition.name.value] = definition
return fragments
# This will actually get both queries and mutations;
# for depth counting they can be treated the same.
def get_queries_and_mutations(
definitions: List[DefinitionNode],
) -> Dict[str, OperationDefinitionNode]:
operations = {}
for definition in definitions:
if isinstance(definition, OperationDefinitionNode):
operation = definition.name.value if definition.name else "anonymous"
operations[operation] = definition
return operations
def determine_depth(
node: Node,
fragments: Dict[str, FragmentDefinitionNode],
depth_so_far: int,
max_depth: int,
context: ValidationContext,
operation_name: str,
ignore: Optional[List[IgnoreType]] = None,
) -> int:
if depth_so_far > max_depth:
context.report_error(
GraphQLError(
f"'{operation_name}' exceeds maximum operation depth of {max_depth}.",
[node],
)
)
return depth_so_far
if isinstance(node, FieldNode):
should_ignore = is_introspection_key(node.name.value) or is_ignored(
node, ignore
)
if should_ignore or not node.selection_set:
return 0
return 1 + max(
map(
lambda selection: determine_depth(
node=selection,
fragments=fragments,
depth_so_far=depth_so_far + 1,
max_depth=max_depth,
context=context,
operation_name=operation_name,
ignore=ignore,
),
node.selection_set.selections,
)
)
elif isinstance(node, FragmentSpreadNode):
return determine_depth(
node=fragments[node.name.value],
fragments=fragments,
depth_so_far=depth_so_far,
max_depth=max_depth,
context=context,
operation_name=operation_name,
ignore=ignore,
)
elif isinstance(
node, (InlineFragmentNode, FragmentDefinitionNode, OperationDefinitionNode)
):
return max(
map(
lambda selection: determine_depth(
node=selection,
fragments=fragments,
depth_so_far=depth_so_far,
max_depth=max_depth,
context=context,
operation_name=operation_name,
ignore=ignore,
),
node.selection_set.selections,
)
)
else:
raise Exception(
f"Depth crawler cannot handle: {node.kind}."
) # pragma: no cover
def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bool:
if ignore is None:
return False
for rule in ignore:
field_name = node.name.value
if isinstance(rule, str):
if field_name == rule:
return True
elif isinstance(rule, Pattern):
if rule.match(field_name):
return True
elif callable(rule):
if rule(field_name):
return True
else:
raise ValueError(f"Invalid ignore option: {rule}.")
return False

View File

@ -0,0 +1,16 @@
from graphql import GraphQLError
from graphql.language import FieldNode
from graphql.validation import ValidationRule
from ..utils.is_introspection_key import is_introspection_key
class DisableIntrospection(ValidationRule):
def enter_field(self, node: FieldNode, *_args):
field_name = node.name.value
if is_introspection_key(field_name):
self.report_error(
GraphQLError(
f"Cannot query '{field_name}': introspection is disabled.", node
)
)
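Wiring the rule in is symmetric to the depth limiter, as the tests below also show (`schema` is a placeholder):

    from graphql import parse, validate

    errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse("{ __schema { queryType { name } } }"),
        rules=(DisableIntrospection,),
    )
    # expect one error: "Cannot query '__schema': introspection is disabled."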

View File

View File

@ -0,0 +1,254 @@
import re
from pytest import raises
from graphql import parse, get_introspection_query, validate
from ...types import Schema, ObjectType, Interface
from ...types import String, Int, List, Field
from ..depth_limit import depth_limit_validator
class PetType(Interface):
name = String(required=True)
class Meta:
name = "Pet"
class CatType(ObjectType):
class Meta:
name = "Cat"
interfaces = (PetType,)
class DogType(ObjectType):
class Meta:
name = "Dog"
interfaces = (PetType,)
class AddressType(ObjectType):
street = String(required=True)
number = Int(required=True)
city = String(required=True)
country = String(required=True)
class Meta:
name = "Address"
class HumanType(ObjectType):
name = String(required=True)
email = String(required=True)
address = Field(AddressType, required=True)
pets = List(PetType, required=True)
class Meta:
name = "Human"
class Query(ObjectType):
user = Field(HumanType, required=True, name=String())
version = String(required=True)
user1 = Field(HumanType, required=True)
user2 = Field(HumanType, required=True)
user3 = Field(HumanType, required=True)
@staticmethod
def resolve_user(root, info, name=None):
pass
schema = Schema(query=Query)
def run_query(query: str, max_depth: int, ignore=None):
document = parse(query)
result = None
def callback(query_depths):
nonlocal result
result = query_depths
errors = validate(
schema=schema.graphql_schema,
document_ast=document,
rules=(
depth_limit_validator(
max_depth=max_depth, ignore=ignore, callback=callback
),
),
)
return errors, result
def test_should_count_depth_without_fragment():
query = """
query read0 {
version
}
query read1 {
version
user {
name
}
}
query read2 {
matt: user(name: "matt") {
email
}
andy: user(name: "andy") {
email
address {
city
}
}
}
query read3 {
matt: user(name: "matt") {
email
}
andy: user(name: "andy") {
email
address {
city
}
pets {
name
owner {
name
}
}
}
}
"""
expected = {"read0": 0, "read1": 1, "read2": 2, "read3": 3}
errors, result = run_query(query, 10)
assert not errors
assert result == expected
def test_should_count_with_fragments():
query = """
query read0 {
... on Query {
version
}
}
query read1 {
version
user {
... on Human {
name
}
}
}
fragment humanInfo on Human {
email
}
fragment petInfo on Pet {
name
owner {
name
}
}
query read2 {
matt: user(name: "matt") {
...humanInfo
}
andy: user(name: "andy") {
...humanInfo
address {
city
}
}
}
query read3 {
matt: user(name: "matt") {
...humanInfo
}
andy: user(name: "andy") {
... on Human {
email
}
address {
city
}
pets {
...petInfo
}
}
}
"""
expected = {"read0": 0, "read1": 1, "read2": 2, "read3": 3}
errors, result = run_query(query, 10)
assert not errors
assert result == expected
def test_should_ignore_the_introspection_query():
errors, result = run_query(get_introspection_query(), 10)
assert not errors
assert result == {"IntrospectionQuery": 0}
def test_should_catch_very_deep_query():
query = """{
user {
pets {
owner {
pets {
owner {
pets {
name
}
}
}
}
}
}
}
"""
errors, result = run_query(query, 4)
assert len(errors) == 1
assert errors[0].message == "'anonymous' exceeds maximum operation depth of 4."
def test_should_ignore_field():
query = """
query read1 {
user { address { city } }
}
query read2 {
user1 { address { city } }
user2 { address { city } }
user3 { address { city } }
}
"""
errors, result = run_query(
query,
10,
ignore=["user1", re.compile("user2"), lambda field_name: field_name == "user3"],
)
expected = {"read1": 2, "read2": 0}
assert not errors
assert result == expected
def test_should_raise_invalid_ignore():
query = """
query read1 {
user { address { city } }
}
"""
with raises(ValueError, match="Invalid ignore option:"):
run_query(query, 10, ignore=[True])

View File

@ -0,0 +1,37 @@
from graphql import parse, validate
from ...types import Schema, ObjectType, String
from ..disable_introspection import DisableIntrospection
class Query(ObjectType):
name = String(required=True)
@staticmethod
def resolve_name(root, info):
return "Hello world!"
schema = Schema(query=Query)
def run_query(query: str):
document = parse(query)
return validate(
schema=schema.graphql_schema,
document_ast=document,
rules=(DisableIntrospection,),
)
def test_disallows_introspection_queries():
errors = run_query("{ __schema { queryType { name } } }")
assert len(errors) == 1
assert errors[0].message == "Cannot query '__schema': introspection is disabled."
def test_allows_non_introspection_queries():
errors = run_query("{ name }")
assert len(errors) == 0

View File

@ -45,20 +45,20 @@ class PyTest(TestCommand):
tests_require = [
"pytest>=5.3,<6",
"pytest-benchmark>=3.2,<4",
"pytest-cov>=2.8,<3",
"pytest-mock>=2,<3",
"pytest-asyncio>=0.10,<2",
"snapshottest>=0.5,<1",
"coveralls>=1.11,<2",
"pytest>=6,<7",
"pytest-benchmark>=3.4,<4",
"pytest-cov>=3,<4",
"pytest-mock>=3,<4",
"pytest-asyncio>=0.16,<2",
"snapshottest>=0.6,<1",
"coveralls>=3.3,<4",
"promise>=2.3,<3",
"mock>=4.0,<5",
"pytz==2019.3",
"iso8601>=0.1,<2",
"mock>=4,<5",
"pytz==2022.1",
"iso8601>=1,<2",
]
dev_requires = ["black==19.10b0", "flake8>=3.7,<4"] + tests_require
dev_requires = ["black==22.3.0", "flake8>=4,<5"] + tests_require
setup(
name="graphene",
@ -78,13 +78,15 @@ setup(
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
],
keywords="api graphql protocol rest relay graphene",
packages=find_packages(exclude=["examples*"]),
install_requires=[
"graphql-core>=3.1.2,<4",
"graphql-relay>=3.0,<4",
"aniso8601>=8,<9",
"graphql-core>=3.1,<3.3",
"graphql-relay>=3.1,<3.3",
"aniso8601>=8,<10",
],
tests_require=tests_require,
extras_require={"test": tests_require, "dev": dev_requires},

18
tox.ini
View File

@ -1,5 +1,5 @@
[tox]
envlist = flake8,py36,py37,py38,pre-commit,mypy
envlist = py3{6,7,8,9,10}, flake8, mypy, pre-commit
skipsdist = true
[testenv]
@ -8,28 +8,28 @@ deps =
setenv =
PYTHONPATH = .:{envdir}
commands =
py{36,37,38}: pytest --cov=graphene graphene examples {posargs}
py{36,37,38,39,310}: pytest --cov=graphene graphene --cov-report=term --cov-report=xml examples {posargs}
[testenv:pre-commit]
basepython=python3.7
basepython = python3.9
deps =
pre-commit>=2,<3
pre-commit>=2.16,<3
setenv =
LC_CTYPE=en_US.UTF-8
commands =
pre-commit {posargs:run --all-files}
pre-commit run --all-files --show-diff-on-failure
[testenv:mypy]
basepython=python3.7
basepython = python3.9
deps =
mypy>=0.761,<1
mypy>=0.950,<1
commands =
mypy graphene
[testenv:flake8]
basepython=python3.7
basepython = python3.9
deps =
flake8>=3.7,<4
flake8>=4,<5
commands =
pip install --pre -e .
flake8 graphene