Mirror of https://github.com/graphql-python/graphene.git (synced 2024-11-10 19:56:45 +03:00)

Commit d96ec55abb — Merge branch 'master' into fix/ariadne-link
.github/workflows/coveralls.yml (vendored, 25 lines)
@@ -1,25 +0,0 @@
-name: 📊 Check Coverage
-on:
-  push:
-    branches:
-      - master
-      - '*.x'
-    paths-ignore:
-      - 'docs/**'
-      - '*.md'
-      - '*.rst'
-  pull_request:
-    branches:
-      - master
-      - '*.x'
-    paths-ignore:
-      - 'docs/**'
-      - '*.md'
-      - '*.rst'
-jobs:
-  coveralls_finish:
-    # check coverage increase/decrease
-    runs-on: ubuntu-latest
-    steps:
-      - name: Coveralls Finished
-        uses: AndreMiras/coveralls-python-action@develop
.github/workflows/deploy.yml (vendored, 4 lines)
@@ -11,10 +11,10 @@ jobs:
     steps:
       - uses: actions/checkout@v2
-      - name: Set up Python 3.8
+      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
-          python-version: 3.8
+          python-version: 3.9
      - name: Build wheel and source tarball
        run: |
          pip install wheel
.github/workflows/lint.yml (vendored, 4 lines)
@@ -8,10 +8,10 @@ jobs:
     steps:
       - uses: actions/checkout@v2
-      - name: Set up Python 3.8
+      - name: Set up Python 3.9
        uses: actions/setup-python@v2
        with:
-          python-version: 3.8
+          python-version: 3.9
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
.github/workflows/tests.yml (vendored, 24 lines)
@@ -25,12 +25,14 @@ jobs:
       fail-fast: false
       matrix:
         include:
-          - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38}
-          - {name: '3.7', python: '3.7', os: ubuntu-latest, tox: py37}
-          - {name: '3.6', python: '3.6', os: ubuntu-latest, tox: py36}
+          - { name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310 }
+          - { name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39 }
+          - { name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38 }
+          - { name: '3.7', python: '3.7', os: ubuntu-latest, tox: py37 }
+          - { name: '3.6', python: '3.6', os: ubuntu-latest, tox: py36 }
     steps:
-      - uses: actions/checkout@v2
-      - uses: actions/setup-python@v2
+      - uses: actions/checkout@v3
+      - uses: actions/setup-python@v3
        with:
          python-version: ${{ matrix.python }}

@@ -45,10 +47,20 @@ jobs:
        run: echo "::set-output name=dir::$(pip cache dir)"

      - name: cache pip dependencies
-       uses: actions/cache@v2
+       uses: actions/cache@v3
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: pip|${{ runner.os }}|${{ matrix.python }}|${{ hashFiles('setup.py') }}

      - run: pip install tox
      - run: tox -e ${{ matrix.tox }}
+     - name: Upload coverage.xml
+       if: ${{ matrix.python == '3.10' }}
+       uses: actions/upload-artifact@v3
+       with:
+         name: graphene-sqlalchemy-coverage
+         path: coverage.xml
+         if-no-files-found: error
+     - name: Upload coverage.xml to codecov
+       if: ${{ matrix.python == '3.10' }}
+       uses: codecov/codecov-action@v3
@@ -1,9 +1,9 @@
 default_language_version:
-  python: python3.8
+  python: python3.9

 repos:
-  - repo: git://github.com/pre-commit/pre-commit-hooks
-    rev: v2.3.0
+  - repo: https://github.com/pre-commit/pre-commit-hooks
+    rev: v4.3.0
     hooks:
       - id: check-merge-conflict
       - id: check-json

@@ -16,15 +16,15 @@ repos:
         - --autofix
       - id: trailing-whitespace
         exclude: README.md
-  - repo: git://github.com/asottile/pyupgrade
-    rev: v2.24.0
+  - repo: https://github.com/asottile/pyupgrade
+    rev: v2.37.3
     hooks:
       - id: pyupgrade
-  - repo: git://github.com/ambv/black
-    rev: 19.3b0
+  - repo: https://github.com/psf/black
+    rev: 22.6.0
     hooks:
       - id: black
-  - repo: git://github.com/PyCQA/flake8
-    rev: 3.8.4
+  - repo: https://github.com/PyCQA/flake8
+    rev: 5.0.4
     hooks:
       - id: flake8
README.md (10 lines)
@@ -4,12 +4,6 @@
 **We are looking for contributors**! Please check the [ROADMAP](https://github.com/graphql-python/graphene/blob/master/ROADMAP.md) to see how you can help ❤️

 ---

 **The below readme is the documentation for the `dev` (prerelease) version of Graphene. To view the documentation for the latest stable Graphene version go to the [v2 docs](https://docs.graphene-python.org/en/stable/)**

 ---

 ## Introduction

 [Graphene](http://graphene-python.org) is an opinionated Python library for building GraphQL schemas/types fast and easily.

@@ -34,10 +28,10 @@ Also, Graphene is fully compatible with the GraphQL spec, working seamlessly wit
 ## Installation

-For instaling graphene, just run this command in your shell
+To install `graphene`, just run this command in your shell

 ```bash
-pip install "graphene>=2.0"
+pip install "graphene>=3.0"
 ```

 ## Examples
README.rst (93 lines)
@@ -1,18 +1,18 @@
 |Graphene Logo| `Graphene <http://graphene-python.org>`__ |Build Status| |PyPI version| |Coverage Status|
 =========================================================================================================

 `💬 Join the community on
 Slack <https://join.slack.com/t/graphenetools/shared_invite/enQtOTE2MDQ1NTg4MDM1LTA4Nzk0MGU0NGEwNzUxZGNjNDQ4ZjAwNDJjMjY0OGE1ZDgxZTg4YjM2ZTc4MjE2ZTAzZjE2ZThhZTQzZTkyMmM>`__

 **We are looking for contributors**! Please check the
 `ROADMAP <https://github.com/graphql-python/graphene/blob/master/ROADMAP.md>`__
 to see how you can help ❤️

 --------------

 |Graphene Logo| `Graphene <http://graphene-python.org>`__ |Build Status| |PyPI version| |Coverage Status|
 =========================================================================================================

 Introduction
 ------------

-`Graphene <http://graphene-python.org>`__ is a Python library for
-building GraphQL schemas/types fast and easily.
+`Graphene <http://graphene-python.org>`__ is an opinionated Python
+library for building GraphQL schemas/types fast and easily.

 - **Easy to use:** Graphene helps you use GraphQL in Python without
   effort.

@@ -27,17 +27,18 @@ Integrations

 Graphene has multiple integrations with different frameworks:

-+---------------------+---------------------------------------------------------------------------------------------+
-| integration         | Package                                                                                     |
-+=====================+=============================================================================================+
-| Django              | `graphene-django <https://github.com/graphql-python/graphene-django/>`__                   |
-+---------------------+---------------------------------------------------------------------------------------------+
-| SQLAlchemy          | `graphene-sqlalchemy <https://github.com/graphql-python/graphene-sqlalchemy/>`__           |
-+---------------------+---------------------------------------------------------------------------------------------+
-| Google App Engine   | `graphene-gae <https://github.com/graphql-python/graphene-gae/>`__                         |
-+---------------------+---------------------------------------------------------------------------------------------+
-| Peewee              | *In progress* (`Tracking Issue <https://github.com/graphql-python/graphene/issues/289>`__) |
-+---------------------+---------------------------------------------------------------------------------------------+
++-------------------+-------------------------------------------------+
+| integration       | Package                                         |
++===================+=================================================+
+| Django            | `graphene-django <https:/                       |
+|                   | /github.com/graphql-python/graphene-django/>`__ |
++-------------------+-------------------------------------------------+
+| SQLAlchemy        | `graphene-sqlalchemy <https://git               |
+|                   | hub.com/graphql-python/graphene-sqlalchemy/>`__ |
++-------------------+-------------------------------------------------+
+| Google App Engine | `graphene-gae <http                             |
+|                   | s://github.com/graphql-python/graphene-gae/>`__ |
++-------------------+-------------------------------------------------+

 Also, Graphene is fully compatible with the GraphQL spec, working
 seamlessly with all GraphQL clients, such as

@@ -48,17 +49,11 @@ seamlessly with all GraphQL clients, such as
 Installation
 ------------

-For instaling graphene, just run this command in your shell
+To install `graphene`, just run this command in your shell

 .. code:: bash

-    pip install "graphene>=2.0"
-
-2.0 Upgrade Guide
------------------
-
-Please read `UPGRADE-v2.0.md </UPGRADE-v2.0.md>`__ to learn how to
-upgrade.
+    pip install "graphene>=3.0"

 Examples
 --------

@@ -67,26 +62,26 @@ Here is one example for you to get started:

 .. code:: python

    import graphene

    class Query(graphene.ObjectType):
        hello = graphene.String(description='A typical hello world')

        def resolve_hello(self, info):
            return 'World'

    schema = graphene.Schema(query=Query)

 Then Querying ``graphene.Schema`` is as simple as:

 .. code:: python

    query = '''
        query SayHello {
          hello
        }
    '''
    result = schema.execute(query)

 If you want to learn even more, you can also check the following
 `examples <examples/>`__:

@@ -110,20 +105,20 @@ dependencies are installed by running:

 .. code:: sh

    virtualenv venv
    source venv/bin/activate
    pip install -e ".[test]"

 Well-written tests and maintaining good test coverage is important to
 this project. While developing, run new and existing tests with:

 .. code:: sh

    py.test graphene/relay/tests/test_node.py # Single file
    py.test graphene/relay                    # All tests in directory

 Add the ``-s`` flag if you have introduced breakpoints into the code for
-debugging. Add the ``-v`` ("verbose") flag to get more detailed test
+debugging. Add the ``-v`` (“verbose”) flag to get more detailed test
 output. For even more detailed output, use ``-vv``. Check out the
 `pytest documentation <https://docs.pytest.org/en/latest/>`__ for more
 options and test running controls.

@@ -132,7 +127,7 @@ You can also run the benchmarks with:

 .. code:: sh

    py.test graphene --benchmark-only

 Graphene supports several versions of Python. To make sure that changes
 do not break compatibility with any of those versions, we use ``tox`` to

@@ -142,14 +137,14 @@ config file, just run:

 .. code:: sh

    tox

 If you wish to run against a specific version defined in the ``tox.ini``
 file:

 .. code:: sh

    tox -e py36

 Tox can only use whatever versions of Python are installed on your
 system. When you create a pull request, Travis will also be running the

@@ -168,7 +163,7 @@ An HTML version of the documentation is produced by running:

 .. code:: sh

    make docs

 .. |Graphene Logo| image:: http://graphene-python.org/favicon.png
 .. |Build Status| image:: https://travis-ci.org/graphql-python/graphene.svg?branch=master
@@ -153,7 +153,7 @@ class Query(ObjectType):
 ```

 Also, if you wanted to create an `ObjectType` that implements `Node`, you have to do it
-explicity.
+explicitly.

 ## Django
@@ -123,7 +123,7 @@ def resolve_my_field(root, info, my_arg):
     return ...
 ```

-**PS.: Take care with receiving args like `my_arg` as above. This doesn't work for optional (non-required) arguments as stantard `Connection`'s arguments (first, before, after, before).**
+**PS.: Take care with receiving args like `my_arg` as above. This doesn't work for optional (non-required) arguments as standard `Connection`'s arguments (first, last, after, before).**

 You may need something like this:

 ```python
docs/conf.py (16 lines)
@@ -64,18 +64,18 @@ source_suffix = ".rst"
 master_doc = "index"

 # General information about the project.
-project = u"Graphene"
-copyright = u"Graphene 2016"
-author = u"Syrus Akbary"
+project = "Graphene"
+copyright = "Graphene 2016"
+author = "Syrus Akbary"

 # The version info for the project you're documenting, acts as replacement for
 # |version| and |release|, also used in various other places throughout the
 # built documents.
 #
 # The short X.Y version.
-version = u"1.0"
+version = "1.0"
 # The full version, including alpha/beta/rc tags.
-release = u"1.0"
+release = "1.0"

 # The language for content autogenerated by Sphinx. Refer to documentation
 # for a list of supported languages.

@@ -278,7 +278,7 @@ latex_elements = {
 # (source start file, target name, title,
 # author, documentclass [howto, manual, or own class]).
 latex_documents = [
-    (master_doc, "Graphene.tex", u"Graphene Documentation", u"Syrus Akbary", "manual")
+    (master_doc, "Graphene.tex", "Graphene Documentation", "Syrus Akbary", "manual")
 ]

 # The name of an image file (relative to this directory) to place at the top of

@@ -318,7 +318,7 @@ latex_documents = [
 # One entry per manual page. List of tuples
 # (source start file, name, description, authors, manual section).
-man_pages = [(master_doc, "graphene", u"Graphene Documentation", [author], 1)]
+man_pages = [(master_doc, "graphene", "Graphene Documentation", [author], 1)]

 # If true, show URL addresses after external links.
 #

@@ -334,7 +334,7 @@ texinfo_documents = [
     (
         master_doc,
         "Graphene",
-        u"Graphene Documentation",
+        "Graphene Documentation",
         author,
         "Graphene",
         "One line description of project.",
@@ -4,7 +4,7 @@ Dataloader
 DataLoader is a generic utility to be used as part of your application's
 data fetching layer to provide a simplified and consistent API over
 various remote data sources such as databases or web services via batching
-and caching.
+and caching. It is provided by a separate package `aiodataloader <https://pypi.org/project/aiodataloader/>`.


 Batching

@@ -15,32 +15,31 @@ Create loaders by providing a batch loading function.

 .. code:: python

-    from promise import Promise
-    from promise.dataloader import DataLoader
+    from aiodataloader import DataLoader

     class UserLoader(DataLoader):
-        def batch_load_fn(self, keys):
-            # Here we return a promise that will result on the
-            # corresponding user for each key in keys
-            return Promise.resolve([get_user(id=key) for key in keys])
+        async def batch_load_fn(self, keys):
+            # Here we call a function to return a user for each key in keys
+            return [get_user(id=key) for key in keys]


-A batch loading function accepts a list of keys, and returns a ``Promise``
-which resolves to a list of ``values``.
+A batch loading async function accepts a list of keys, and returns a list of ``values``.

-Then load individual values from the loader. ``DataLoader`` will coalesce all
-individual loads which occur within a single frame of execution (executed once
-the wrapping promise is resolved) and then call your batch function with all
-requested keys.
+``DataLoader`` will coalesce all individual loads which occur within a
+single frame of execution (executed once the wrapping event loop is resolved)
+and then call your batch function with all requested keys.


 .. code:: python

     user_loader = UserLoader()

-    user_loader.load(1).then(lambda user: user_loader.load(user.best_friend_id))
+    user1 = await user_loader.load(1)
+    user1_best_friend = await user_loader.load(user1.best_friend_id))

-    user_loader.load(2).then(lambda user: user_loader.load(user.best_friend_id))
+    user2 = await user_loader.load(2)
+    user2_best_friend = await user_loader.load(user2.best_friend_id))


 A naive application may have issued *four* round-trips to a backend for the

@@ -54,9 +53,9 @@ make sure that you then order the query result for the results to match the keys
 .. code:: python

     class UserLoader(DataLoader):
-        def batch_load_fn(self, keys):
+        async def batch_load_fn(self, keys):
             users = {user.id: user for user in User.objects.filter(id__in=keys)}
-            return Promise.resolve([users.get(user_id) for user_id in keys])
+            return [users.get(user_id) for user_id in keys]


 ``DataLoader`` allows you to decouple unrelated parts of your application without

@@ -96,7 +95,7 @@ Consider the following GraphQL request:
     }


-Naively, if ``me``, ``bestFriend`` and ``friends`` each need to request the backend,
+If ``me``, ``bestFriend`` and ``friends`` each need to send a request to the backend,
 there could be at most 13 database requests!


@@ -111,8 +110,8 @@ leaner code and at most 4 database requests, and possibly fewer if there are cac
         best_friend = graphene.Field(lambda: User)
         friends = graphene.List(lambda: User)

-        def resolve_best_friend(root, info):
-            return user_loader.load(root.best_friend_id)
+        async def resolve_best_friend(root, info):
+            return await user_loader.load(root.best_friend_id)

-        def resolve_friends(root, info):
-            return user_loader.load_many(root.friend_ids)
+        async def resolve_friends(root, info):
+            return await user_loader.load_many(root.friend_ids)
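Putting the pieces of this section together, a self-contained sketch of the async DataLoader pattern with Graphene 3 might look roughly like the following. The in-memory ``USERS`` dict, the field names, and the per-request loader passed via ``context`` are illustrative assumptions, not part of the documented example.

```python
import graphene
from aiodataloader import DataLoader

# Stand-in data source; a real application would hit a database or service here.
USERS = {
    1: {"name": "Alice", "best_friend_id": 2},
    2: {"name": "Bob", "best_friend_id": 1},
}


class UserLoader(DataLoader):
    async def batch_load_fn(self, keys):
        # One batched lookup for all keys requested within the same frame of execution.
        return [USERS.get(key) for key in keys]


class User(graphene.ObjectType):
    name = graphene.String()
    best_friend = graphene.Field(lambda: User)

    async def resolve_best_friend(root, info):
        return await info.context["user_loader"].load(root["best_friend_id"])


class Query(graphene.ObjectType):
    user = graphene.Field(User, id=graphene.Int(required=True))

    async def resolve_user(root, info, id):
        return await info.context["user_loader"].load(id)


schema = graphene.Schema(query=Query)

# Hypothetical usage, creating one loader per request so batching and caching
# stay scoped to a single event loop:
#     result = await schema.execute_async(
#         "{ user(id: 1) { name bestFriend { name } } }",
#         context={"user_loader": UserLoader()},
#     )
```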
@@ -4,5 +4,5 @@ File uploading
 File uploading is not part of the official GraphQL spec yet and is not natively
 implemented in Graphene.

-If your server needs to support file uploading then you can use the libary: `graphene-file-upload <https://github.com/lmcgartland/graphene-file-upload>`_ which enhances Graphene to add file
+If your server needs to support file uploading then you can use the library: `graphene-file-upload <https://github.com/lmcgartland/graphene-file-upload>`_ which enhances Graphene to add file
 uploads and conforms to the unoffical GraphQL `multipart request spec <https://github.com/jaydenseric/graphql-multipart-request-spec>`_.
@@ -46,7 +46,7 @@ Functional example
 ------------------

 Middleware can also be defined as a function. Here we define a middleware that
-logs the time it takes to resolve each field
+logs the time it takes to resolve each field:

 .. code:: python
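The body of that code block is not visible in this excerpt. As a rough sketch only, a functional timing middleware of the kind described above could look like this; the `timing_middleware` name and the logging details are assumptions, not part of the diff.

```python
from time import time


def timing_middleware(next, root, info, **args):
    """Log how long each field takes to resolve (illustrative sketch)."""
    start = time()
    result = next(root, info, **args)
    elapsed_ms = round((time() - start) * 1000, 2)
    print(f"{info.parent_type.name}.{info.field_name} took {elapsed_ms} ms")
    return result


# Hypothetical usage:
#     result = schema.execute("{ hello }", middleware=[timing_middleware])
```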
@@ -22,6 +22,7 @@ Usage
 Here is how you would implement depth-limiting on your schema.

+
 .. code:: python

     from graphql import validate, parse
     from graphene import ObjectType, Schema, String
     from graphene.validation import depth_limit_validator

@@ -37,7 +38,7 @@ Here is how you would implement depth-limiting on your schema.
     # will not be executed.

     validation_errors = validate(
-        schema=schema,
+        schema=schema.graphql_schema,
         document_ast=parse('THE QUERY'),
         rules=(
             depth_limit_validator(

@@ -58,6 +59,7 @@ Usage
 Here is how you would disable introspection for your schema.

+
 .. code:: python

     from graphql import validate, parse
     from graphene import ObjectType, Schema, String
     from graphene.validation import DisableIntrospection

@@ -72,7 +74,7 @@ Here is how you would disable introspection for your schema.
     # introspection queries will not be executed.

     validation_errors = validate(
-        schema=schema,
+        schema=schema.graphql_schema,
         document_ast=parse('THE QUERY'),
         rules=(
             DisableIntrospection,

@@ -92,6 +94,7 @@ reason. Here is an example query validator that visits field definitions in Grap
 if any of those fields are blacklisted:

+
 .. code:: python

     from graphql import GraphQLError
     from graphql.language import FieldNode
     from graphql.validation import ValidationRule
|
|||
Requirements
|
||||
~~~~~~~~~~~~
|
||||
|
||||
- Python (2.7, 3.4, 3.5, 3.6, pypy)
|
||||
- Graphene (2.0)
|
||||
- Python (3.6, 3.7, 3.8, 3.9, 3.10, pypy)
|
||||
- Graphene (3.0)
|
||||
|
||||
Project setup
|
||||
~~~~~~~~~~~~~
|
||||
|
||||
.. code:: bash
|
||||
|
||||
pip install "graphene>=2.0"
|
||||
pip install "graphene>=3.0"
|
||||
|
||||
Creating a basic Schema
|
||||
~~~~~~~~~~~~~~~~~~~~~~~
|
||||
|
|
|
@@ -86,7 +86,7 @@ In the Python ``Enum`` implementation you can access a member by initing the Enu
     assert Color(1) == Color.RED


-However, in Graphene ``Enum`` you need to call get to have the same effect:
+However, in Graphene ``Enum`` you need to call `.get` to have the same effect:

 .. code:: python
@@ -44,7 +44,7 @@ Both of these types have all of the fields from the ``Character`` interface,
 but also bring in extra fields, ``home_planet``, ``starships`` and
 ``primary_function``, that are specific to that particular type of character.

-The full GraphQL schema defition will look like this:
+The full GraphQL schema definition will look like this:

 .. code::
@@ -85,9 +85,9 @@ We should receive:

 InputFields and InputObjectTypes
 ----------------------------------
-InputFields are used in mutations to allow nested input data for mutations
+InputFields are used in mutations to allow nested input data for mutations.

-To use an InputField you define an InputObjectType that specifies the structure of your input data
+To use an InputField you define an InputObjectType that specifies the structure of your input data:


 .. code:: python

@@ -112,7 +112,7 @@ To use an InputField you define an InputObjectType that specifies the structure
             return CreatePerson(person=person)


-Note that **name** and **age** are part of **person_data** now
+Note that **name** and **age** are part of **person_data** now.

 Using the above mutation your new query would look like this:

@@ -128,7 +128,7 @@ Using the above mutation your new query would look like this:
     }

 InputObjectTypes can also be fields of InputObjectTypes allowing you to have
-as complex of input data as you need
+as complex of input data as you need:

 .. code:: python

@@ -160,7 +160,7 @@ To return an existing ObjectType instead of a mutation-specific type, set the **
         def mutate(root, info, name):
             return Person(name=name)

-Then, if we query (``schema.execute(query_str)``) the following:
+Then, if we query (``schema.execute(query_str)``) with the following:

 .. code::
@@ -270,7 +270,7 @@ The following is an example for creating a DateTime scalar:
             return dt.isoformat()

         @staticmethod
-        def parse_literal(node):
+        def parse_literal(node, _variables=None):
             if isinstance(node, ast.StringValue):
                 return datetime.datetime.strptime(
                     node.value, "%Y-%m-%dT%H:%M:%S.%f")
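For context, a self-contained version of such a custom scalar under Graphene 3 and graphql-core 3 might look roughly like the sketch below. The use of ``StringValueNode`` (instead of the older ``ast.StringValue``) and the ``Undefined`` fallback are assumptions about the reader's environment, not part of this diff.

```python
import datetime

import graphene
from graphql import Undefined
from graphql.language.ast import StringValueNode


class DateTime(graphene.Scalar):
    """Illustrative DateTime scalar using the new parse_literal signature."""

    @staticmethod
    def serialize(dt):
        return dt.isoformat()

    @staticmethod
    def parse_literal(node, _variables=None):
        # _variables is accepted (and ignored) to match graphql-core 3's call signature.
        if isinstance(node, StringValueNode):
            return datetime.datetime.strptime(node.value, "%Y-%m-%dT%H:%M:%S.%f")
        return Undefined

    @staticmethod
    def parse_value(value):
        return datetime.datetime.strptime(value, "%Y-%m-%dT%H:%M:%S.%f")
```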
@@ -44,7 +44,7 @@ There are some cases where the schema cannot access all of the types that we pla
 For example, when a field returns an ``Interface``, the schema doesn't know about any of the
 implementations.

-In this case, we need to use the ``types`` argument when creating the Schema.
+In this case, we need to use the ``types`` argument when creating the Schema:


 .. code:: python
@@ -63,7 +63,7 @@ By default all field and argument names (that are not
 explicitly set with the ``name`` arg) will be converted from
 ``snake_case`` to ``camelCase`` (as the API is usually being consumed by a js/mobile client)

-For example with the ObjectType
+For example with the ObjectType the ``last_name`` field name is converted to ``lastName``:

 .. code:: python

@@ -71,12 +71,10 @@ For example with the ObjectType
         last_name = graphene.String()
         other_name = graphene.String(name='_other_Name')

-the ``last_name`` field name is converted to ``lastName``.
-
 In case you don't want to apply this transformation, provide a ``name`` argument to the field constructor.
 ``other_name`` converts to ``_other_Name`` (without further transformations).

-Your query should look like
+Your query should look like:

 .. code::

@@ -86,7 +84,7 @@ Your query should look like
     }

-To disable this behavior, set the ``auto_camelcase`` to ``False`` upon schema instantiation.
+To disable this behavior, set the ``auto_camelcase`` to ``False`` upon schema instantiation:

 .. code:: python
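As a minimal sketch of that last point (the ``Query`` type and resolver here are illustrative, not taken from the documentation page):

```python
import graphene


class Query(graphene.ObjectType):
    last_name = graphene.String()

    def resolve_last_name(root, info):
        return "Doe"


# With auto_camelcase disabled, field names keep their snake_case spelling.
schema = graphene.Schema(query=Query, auto_camelcase=False)
result = schema.execute("{ last_name }")
assert result.data == {"last_name": "Doe"}
```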
@@ -7,7 +7,7 @@ to specify any common fields between the types.
 The basics:

 - Each Union is a Python class that inherits from ``graphene.Union``.
-- Unions don't have any fields on it, just links to the possible objecttypes.
+- Unions don't have any fields on it, just links to the possible ObjectTypes.

 Quick example
 -------------
@@ -49,7 +49,7 @@ type Faction implements Node {
   name: String

   """The ships used by the faction."""
-  ships(before: String = null, after: String = null, first: Int = null, last: Int = null): ShipConnection
+  ships(before: String, after: String, first: Int, last: Int): ShipConnection
 }

 """An object with an ID"""

@@ -115,5 +115,4 @@ input IntroduceShipInput {
   shipName: String!
   factionId: String!
   clientMutationId: String
-}
-'''
+}'''
@@ -9,7 +9,7 @@ client = Client(schema)


 def test_str_schema(snapshot):
-    snapshot.assert_match(str(schema))
+    snapshot.assert_match(str(schema).strip())


 def test_correctly_fetches_id_name_rebels(snapshot):
@@ -41,7 +41,7 @@ from .types import (
 from .utils.module_loading import lazy_import
 from .utils.resolve_only_args import resolve_only_args

-VERSION = (3, 0, 0, "beta", 7)
+VERSION = (3, 1, 0, "final", 0)


 __version__ = get_version(VERSION)
@@ -19,10 +19,7 @@ def get_version(version=None):
     sub = ""
     if version[3] == "alpha" and version[4] == 0:
         git_changeset = get_git_changeset()
-        if git_changeset:
-            sub = ".dev%s" % git_changeset
-        else:
-            sub = ".dev"
+        sub = ".dev%s" % git_changeset if git_changeset else ".dev"
     elif version[3] != "final":
         mapping = {"alpha": "a", "beta": "b", "rc": "rc"}
         sub = mapping[version[3]] + str(version[4])
@@ -18,11 +18,7 @@ def is_node(objecttype):
     if not issubclass(objecttype, ObjectType):
         return False

-    for i in objecttype._meta.interfaces:
-        if issubclass(i, Node):
-            return True
-
-    return False
+    return any(issubclass(i, Node) for i in objecttype._meta.interfaces)


 class GlobalID(Field):

@@ -90,11 +86,13 @@ class Node(AbstractNode):
     def get_node_from_global_id(cls, info, global_id, only_type=None):
         try:
             _type, _id = cls.from_global_id(global_id)
+            if not _type:
+                raise ValueError("Invalid Global ID")
         except Exception as e:
             raise Exception(
                 f'Unable to parse global ID "{global_id}". '
                 'Make sure it is a base64 encoded string in the format: "TypeName:id". '
-                f"Exception message: {str(e)}"
+                f"Exception message: {e}"
             )

         graphene_type = info.schema.get_type(_type)
@@ -1,7 +1,7 @@
 import re
-from graphql_relay import to_global_id
+from textwrap import dedent

-from graphene.tests.utils import dedent
+from graphql_relay import to_global_id

 from ...types import ObjectType, Schema, String
 from ..node import Node, is_node

@@ -171,8 +171,10 @@ def test_node_field_only_lazy_type_wrong():


 def test_str_schema():
-    assert str(schema) == dedent(
-        '''
+    assert (
+        str(schema).strip()
+        == dedent(
+            '''
     schema {
       query: RootQuery
     }

@@ -213,4 +215,5 @@ def test_str_schema():
     ): MyNode
     }
     '''
+        ).strip()
     )
@@ -1,6 +1,6 @@
-from graphql import graphql_sync
+from textwrap import dedent

-from graphene.tests.utils import dedent
+from graphql import graphql_sync

 from ...types import Interface, ObjectType, Schema
 from ...types.scalars import Int, String

@@ -54,8 +54,10 @@ graphql_schema = schema.graphql_schema


 def test_str_schema_correct():
-    assert str(schema) == dedent(
-        '''
+    assert (
+        str(schema).strip()
+        == dedent(
+            '''
     schema {
       query: RootQuery
     }

@@ -93,6 +95,7 @@ def test_str_schema_correct():
     ): Node
     }
     '''
+        ).strip()
     )
@@ -1,5 +1,4 @@
-from promise import Promise, is_thenable
-from graphql.error import format_error as format_graphql_error
 from graphql.error import GraphQLError

 from graphene.types.schema import Schema

@@ -7,7 +6,7 @@ from graphene.types.schema import Schema

 def default_format_error(error):
     if isinstance(error, GraphQLError):
-        return format_graphql_error(error)
+        return error.formatted
     return {"message": str(error)}
graphene/tests/issues/test_1394.py (new file, 36 lines)
@@ -0,0 +1,36 @@
from ...types import ObjectType, Schema, String, NonNull


class Query(ObjectType):
    hello = String(input=NonNull(String))

    def resolve_hello(self, info, input):
        if input == "nothing":
            return None
        return f"Hello {input}!"


schema = Schema(query=Query)


def test_required_input_provided():
    """
    Test that a required argument works when provided.
    """
    input_value = "Potato"
    result = schema.execute('{ hello(input: "%s") }' % input_value)
    assert not result.errors
    assert result.data == {"hello": "Hello Potato!"}


def test_required_input_missing():
    """
    Test that a required argument raised an error if not provided.
    """
    result = schema.execute("{ hello }")
    assert result.errors
    assert len(result.errors) == 1
    assert (
        result.errors[0].message
        == "Field 'hello' argument 'input' of type 'String!' is required, but it was not provided."
    )
graphene/tests/issues/test_1419.py (new file, 53 lines)
@@ -0,0 +1,53 @@
import pytest

from ...types.base64 import Base64
from ...types.datetime import Date, DateTime
from ...types.decimal import Decimal
from ...types.generic import GenericScalar
from ...types.json import JSONString
from ...types.objecttype import ObjectType
from ...types.scalars import ID, BigInt, Boolean, Float, Int, String
from ...types.schema import Schema
from ...types.uuid import UUID


@pytest.mark.parametrize(
    "input_type,input_value",
    [
        (Date, '"2022-02-02"'),
        (GenericScalar, '"foo"'),
        (Int, "1"),
        (BigInt, "12345678901234567890"),
        (Float, "1.1"),
        (String, '"foo"'),
        (Boolean, "true"),
        (ID, "1"),
        (DateTime, '"2022-02-02T11:11:11"'),
        (UUID, '"cbebbc62-758e-4f75-a890-bc73b5017d81"'),
        (Decimal, '"1.1"'),
        (JSONString, '"{\\"key\\":\\"foo\\",\\"value\\":\\"bar\\"}"'),
        (Base64, '"Q2hlbG8gd29ycmxkCg=="'),
    ],
)
def test_parse_literal_with_variables(input_type, input_value):
    # input_b needs to be evaluated as literal while the variable dict for
    # input_a is passed along.

    class Query(ObjectType):
        generic = GenericScalar(input_a=GenericScalar(), input_b=input_type())

        def resolve_generic(self, info, input_a=None, input_b=None):
            return input

    schema = Schema(query=Query)

    query = f"""
    query Test($a: GenericScalar){{
        generic(inputA: $a, inputB: {input_value})
    }}
    """
    result = schema.execute(
        query,
        variables={"a": "bar"},
    )
    assert not result.errors
@@ -1,9 +0,0 @@
-from textwrap import dedent as _dedent
-
-
-def dedent(text: str) -> str:
-    """Fix indentation of given text by removing leading spaces and tabs.
-    Also removes leading newlines and trailing spaces and tabs, but keeps trailing
-    newlines.
-    """
-    return _dedent(text.lstrip("\n").rstrip(" \t"))
@@ -1,4 +1,5 @@
 from itertools import chain
+from graphql import Undefined

 from .dynamic import Dynamic
 from .mountedtype import MountedType

@@ -41,7 +42,7 @@ class Argument(MountedType):
     def __init__(
         self,
         type_,
-        default_value=None,
+        default_value=Undefined,
         description=None,
         name=None,
         required=False,
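The switch from ``None`` to ``Undefined`` as the argument default lets a resolver tell "argument omitted" apart from "argument explicitly set to null". A rough sketch of how that distinction surfaces in practice (the type, argument name, and messages below are illustrative assumptions, not part of the diff):

```python
import graphene
from graphql import Undefined


class Query(graphene.ObjectType):
    greeting = graphene.String(who=graphene.String())

    def resolve_greeting(root, info, who=Undefined):
        # An omitted argument is simply not passed, so the Python default
        # (Undefined) applies; an explicit null arrives as None.
        if who is Undefined:
            return "Hello, whoever you are"
        if who is None:
            return "Hello, nobody"
        return f"Hello, {who}"


schema = graphene.Schema(query=Query)
```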
@@ -22,7 +22,7 @@ class Base64(Scalar):
         return b64encode(value).decode("utf-8")

     @classmethod
-    def parse_literal(cls, node):
+    def parse_literal(cls, node, _variables=None):
         if not isinstance(node, StringValueNode):
             raise GraphQLError(
                 f"Base64 cannot represent non-string value: {print_ast(node)}"
@@ -2,6 +2,7 @@ from __future__ import absolute_import

 from decimal import Decimal as _Decimal

+from graphql import Undefined
 from graphql.language.ast import StringValueNode, IntValueNode

 from .scalars import Scalar

@@ -22,13 +23,14 @@ class Decimal(Scalar):
         return str(dec)

     @classmethod
-    def parse_literal(cls, node):
+    def parse_literal(cls, node, _variables=None):
         if isinstance(node, (StringValueNode, IntValueNode)):
             return cls.parse_value(node.value)
+        return Undefined

     @staticmethod
     def parse_value(value):
         try:
             return _Decimal(value)
-        except ValueError:
-            return None
+        except Exception:
+            return Undefined
@@ -7,7 +7,6 @@ from graphql import (
     GraphQLObjectType,
     GraphQLScalarType,
     GraphQLUnionType,
-    Undefined,
 )

@@ -50,7 +49,7 @@ class GrapheneEnumType(GrapheneGraphQLType, GraphQLEnumType):
         try:
             value = enum[value]
         except KeyError:
-            return Undefined
+            pass
         return super(GrapheneEnumType, self).serialize(value)
@@ -52,7 +52,10 @@ class EnumMeta(SubclassWithMeta_Meta):
         return super(EnumMeta, cls).__call__(*args, **kwargs)
         # return cls._meta.enum(*args, **kwargs)

-    def from_enum(cls, enum, description=None, deprecation_reason=None):  # noqa: N805
+    def from_enum(
+        cls, enum, name=None, description=None, deprecation_reason=None
+    ):  # noqa: N805
+        name = name or enum.__name__
         description = description or enum.__doc__
         meta_dict = {
             "enum": enum,

@@ -60,7 +63,7 @@ class EnumMeta(SubclassWithMeta_Meta):
             "deprecation_reason": deprecation_reason,
         }
         meta_class = type("Meta", (object,), meta_dict)
-        return type(meta_class.enum.__name__, (Enum,), {"Meta": meta_class})
+        return type(name, (Enum,), {"Meta": meta_class})


 class Enum(UnmountedType, BaseType, metaclass=EnumMeta):
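With the new ``name`` parameter, the same Python enum can be exposed under more than one GraphQL type name. A minimal usage sketch, mirroring the ``test_enum_with_name`` test added later in this commit (the ``Color`` enum itself is illustrative):

```python
from enum import Enum as PyEnum

import graphene


class Color(PyEnum):
    RED = 1
    GREEN = 2


# Without name= the GraphQL type is called "Color"; with name= the same enum
# can be registered again under a second, distinct type name.
GColor = graphene.Enum.from_enum(Color, description="original colors")
UniqueGColor = graphene.Enum.from_enum(Color, name="UniqueColor", description="unique colors")
```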
|
@ -29,7 +29,7 @@ class GenericScalar(Scalar):
|
|||
parse_value = identity
|
||||
|
||||
@staticmethod
|
||||
def parse_literal(ast):
|
||||
def parse_literal(ast, _variables=None):
|
||||
if isinstance(ast, (StringValueNode, BooleanValueNode)):
|
||||
return ast.value
|
||||
elif isinstance(ast, IntValueNode):
|
||||
|
|
|
@@ -5,11 +5,12 @@ from .utils import yank_fields_from_attrs
 # For static type checking with Mypy
 MYPY = False
 if MYPY:
-    from typing import Dict  # NOQA
+    from typing import Dict, Iterable, Type  # NOQA


 class InterfaceOptions(BaseOptions):
     fields = None  # type: Dict[str, Field]
+    interfaces = ()  # type: Iterable[Type[Interface]]


 class Interface(BaseType):

@@ -45,7 +46,7 @@ class Interface(BaseType):
     """

     @classmethod
-    def __init_subclass_with_meta__(cls, _meta=None, **options):
+    def __init_subclass_with_meta__(cls, _meta=None, interfaces=(), **options):
         if not _meta:
             _meta = InterfaceOptions(cls)

@@ -58,6 +59,9 @@ class Interface(BaseType):
         else:
             _meta.fields = fields

+        if not _meta.interfaces:
+            _meta.interfaces = interfaces
+
         super(Interface, cls).__init_subclass_with_meta__(_meta=_meta, **options)

     @classmethod
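This change lets an interface declare the interfaces it implements via its ``Meta``. A small sketch of the resulting API, mirroring the ``test_generate_interface_with_meta`` change further down in this commit (the ``Named``/``Person`` names are illustrative assumptions):

```python
import graphene


class Named(graphene.Interface):
    name = graphene.String()


class Person(graphene.Interface):
    class Meta:
        # An interface can now declare the interfaces it itself implements.
        interfaces = [Named]

    age = graphene.Int()


assert Person._meta.interfaces == [Named]
```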
@@ -2,6 +2,7 @@ from __future__ import absolute_import

 import json

+from graphql import Undefined
 from graphql.language.ast import StringValueNode

 from .scalars import Scalar

@@ -20,9 +21,13 @@ class JSONString(Scalar):
         return json.dumps(dt)

     @staticmethod
-    def parse_literal(node):
+    def parse_literal(node, _variables=None):
         if isinstance(node, StringValueNode):
-            return json.loads(node.value)
+            try:
+                return json.loads(node.value)
+            except Exception as error:
+                raise ValueError(f"Badly formed JSONString: {str(error)}")
+        return Undefined

     @staticmethod
     def parse_value(value):
@@ -29,21 +29,21 @@ class Mutation(ObjectType):

     .. code:: python

-        from graphene import Mutation, ObjectType, String, Boolean, Field
+        import graphene

-        class CreatePerson(Mutation):
+        class CreatePerson(graphene.Mutation):
             class Arguments:
-                name = String()
+                name = graphene.String()

-            ok = Boolean()
-            person = Field(Person)
+            ok = graphene.Boolean()
+            person = graphene.Field(Person)

             def mutate(parent, info, name):
                 person = Person(name=name)
                 ok = True
                 return CreatePerson(person=person, ok=ok)

-        class Mutation(ObjectType):
+        class Mutation(graphene.ObjectType):
             create_person = CreatePerson.Field()

     Meta class options (optional):

@@ -101,10 +101,7 @@ class Mutation(ObjectType):
                 "Read more:"
                 " https://github.com/graphql-python/graphene/blob/v2.0.0/UPGRADE-v2.0.md#mutation-input"
             )
-        if input_class:
-            arguments = props(input_class)
-        else:
-            arguments = {}
+        arguments = props(input_class) if input_class else {}
         if not resolver:
             mutate = getattr(cls, "mutate", None)
             assert mutate, "All mutations must define a mutate method in it"
@@ -7,9 +7,7 @@ def dict_resolver(attname, default_value, root, info, **args):


 def dict_or_attr_resolver(attname, default_value, root, info, **args):
-    resolver = attr_resolver
-    if isinstance(root, dict):
-        resolver = dict_resolver
+    resolver = dict_resolver if isinstance(root, dict) else attr_resolver
     return resolver(attname, default_value, root, info, **args)
@@ -1,5 +1,6 @@
 from typing import Any

+from graphql import Undefined
 from graphql.language.ast import (
     BooleanValueNode,
     FloatValueNode,

@@ -67,19 +68,21 @@ class Int(Scalar):
         try:
             num = int(float(value))
         except ValueError:
-            return None
+            return Undefined
         if MIN_INT <= num <= MAX_INT:
             return num
+        return Undefined

     serialize = coerce_int
     parse_value = coerce_int

     @staticmethod
-    def parse_literal(ast):
+    def parse_literal(ast, _variables=None):
         if isinstance(ast, IntValueNode):
             num = int(ast.value)
             if MIN_INT <= num <= MAX_INT:
                 return num
+        return Undefined


 class BigInt(Scalar):

@@ -97,16 +100,17 @@ class BigInt(Scalar):
         try:
             num = int(float(value))
         except ValueError:
-            return None
+            return Undefined
         return num

     serialize = coerce_int
     parse_value = coerce_int

     @staticmethod
-    def parse_literal(ast):
+    def parse_literal(ast, _variables=None):
         if isinstance(ast, IntValueNode):
             return int(ast.value)
+        return Undefined


 class Float(Scalar):

@@ -122,15 +126,16 @@ class Float(Scalar):
         try:
             return float(value)
         except ValueError:
-            return None
+            return Undefined

     serialize = coerce_float
     parse_value = coerce_float

     @staticmethod
-    def parse_literal(ast):
+    def parse_literal(ast, _variables=None):
         if isinstance(ast, (FloatValueNode, IntValueNode)):
             return float(ast.value)
+        return Undefined


 class String(Scalar):

@@ -143,16 +148,17 @@ class String(Scalar):
     @staticmethod
     def coerce_string(value):
         if isinstance(value, bool):
-            return u"true" if value else u"false"
+            return "true" if value else "false"
         return str(value)

     serialize = coerce_string
     parse_value = coerce_string

     @staticmethod
-    def parse_literal(ast):
+    def parse_literal(ast, _variables=None):
         if isinstance(ast, StringValueNode):
             return ast.value
+        return Undefined


 class Boolean(Scalar):

@@ -164,9 +170,10 @@ class Boolean(Scalar):
     parse_value = bool

     @staticmethod
-    def parse_literal(ast):
+    def parse_literal(ast, _variables=None):
         if isinstance(ast, BooleanValueNode):
             return ast.value
+        return Undefined


 class ID(Scalar):

@@ -182,6 +189,7 @@ class ID(Scalar):
     parse_value = str

     @staticmethod
-    def parse_literal(ast):
+    def parse_literal(ast, _variables=None):
         if isinstance(ast, (StringValueNode, IntValueNode)):
             return ast.value
+        return Undefined
@@ -233,11 +233,20 @@ class TypeMap(dict):
             else None
         )

+        def interfaces():
+            interfaces = []
+            for graphene_interface in graphene_type._meta.interfaces:
+                interface = self.add_type(graphene_interface)
+                assert interface.graphene_type == graphene_interface
+                interfaces.append(interface)
+            return interfaces
+
         return GrapheneInterfaceType(
             graphene_type=graphene_type,
             name=graphene_type._meta.name,
             description=graphene_type._meta.description,
             fields=partial(self.create_fields_for_type, graphene_type),
+            interfaces=interfaces,
             resolve_type=resolve_type,
         )

@@ -376,19 +385,11 @@ class TypeMap(dict):
     def resolve_type(self, resolve_type_func, type_name, root, info, _type):
         type_ = resolve_type_func(root, info)

-        if not type_:
-            return_type = self[type_name]
-            return default_type_resolver(root, info, return_type)
-
         if inspect.isclass(type_) and issubclass(type_, ObjectType):
-            graphql_type = self.get(type_._meta.name)
-            assert graphql_type, f"Can't find type {type_._meta.name} in schema"
-            assert (
-                graphql_type.graphene_type == type_
-            ), f"The type {type_} does not match with the associated graphene type {graphql_type.graphene_type}."
-            return graphql_type
+            return type_._meta.name

-        return type_
+        return_type = self[type_name]
+        return default_type_resolver(root, info, return_type)


 class Schema:
@@ -39,8 +39,25 @@ def test_bad_decimal_query():
     not_a_decimal = "Nobody expects the Spanish Inquisition!"

     result = schema.execute("""{ decimal(input: "%s") }""" % not_a_decimal)
     assert result.errors
     assert len(result.errors) == 1
+    assert result.data is None
+    assert (
+        result.errors[0].message
+        == "Expected value of type 'Decimal', found \"Nobody expects the Spanish Inquisition!\"."
+    )
+
+    result = schema.execute("{ decimal(input: true) }")
+    assert result.errors
+    assert len(result.errors) == 1
+    assert result.data is None
+    assert result.errors[0].message == "Expected value of type 'Decimal', found true."
+
+    result = schema.execute("{ decimal(input: 1.2) }")
+    assert result.errors
+    assert len(result.errors) == 1
+    assert result.data is None
+    assert result.errors[0].message == "Expected value of type 'Decimal', found 1.2."


 def test_decimal_string_query_integer():
@@ -251,19 +251,22 @@ def test_enum_types():

     schema = Schema(query=Query)

-    assert str(schema) == dedent(
-        '''\
-        type Query {
-          color: Color!
-        }
-
-        """Primary colors"""
-        enum Color {
-          RED
-          YELLOW
-          BLUE
-        }
-        '''
+    assert (
+        str(schema).strip()
+        == dedent(
+            '''
+            type Query {
+              color: Color!
+            }
+
+            """Primary colors"""
+            enum Color {
+              RED
+              YELLOW
+              BLUE
+            }
+            '''
+        ).strip()
     )

@@ -325,6 +328,52 @@ def test_enum_resolver_compat():
     assert results.data["colorByName"] == Color.RED.name


+def test_enum_with_name():
+    from enum import Enum as PyEnum
+
+    class Color(PyEnum):
+        RED = 1
+        YELLOW = 2
+        BLUE = 3
+
+    GColor = Enum.from_enum(Color, description="original colors")
+    UniqueGColor = Enum.from_enum(
+        Color, name="UniqueColor", description="unique colors"
+    )
+
+    class Query(ObjectType):
+        color = GColor(required=True)
+        unique_color = UniqueGColor(required=True)
+
+    schema = Schema(query=Query)
+
+    assert (
+        str(schema).strip()
+        == dedent(
+            '''
+            type Query {
+              color: Color!
+              uniqueColor: UniqueColor!
+            }
+
+            """original colors"""
+            enum Color {
+              RED
+              YELLOW
+              BLUE
+            }
+
+            """unique colors"""
+            enum UniqueColor {
+              RED
+              YELLOW
+              BLUE
+            }
+            '''
+        ).strip()
+    )
+
+
 def test_enum_resolver_invalid():
     from enum import Enum as PyEnum

@@ -345,10 +394,7 @@ def test_enum_resolver_invalid():

     results = schema.execute("query { color }")
     assert results.errors
-    assert (
-        results.errors[0].message
-        == "Expected a value of type 'Color' but received: 'BLACK'"
-    )
+    assert results.errors[0].message == "Enum 'Color' cannot represent value: 'BLACK'"


 def test_field_enum_argument():

@@ -460,12 +506,13 @@ def test_mutation_enum_input_type():

     schema = Schema(query=Query, mutation=MyMutation)
     result = schema.execute(
-        """ mutation MyMutation {
-            createPaint(colorInput: { color: RED }) {
-                color
-            }
-        }
-    """
+        """
+        mutation MyMutation {
+            createPaint(colorInput: { color: RED }) {
+                color
+            }
+        }
+        """
     )
     assert not result.errors
     assert result.data == {"createPaint": {"color": "RED"}}
@@ -25,13 +25,18 @@ def test_generate_interface():


 def test_generate_interface_with_meta():
+    class MyFirstInterface(Interface):
+        pass
+
     class MyInterface(Interface):
         class Meta:
             name = "MyOtherInterface"
             description = "Documentation"
+            interfaces = [MyFirstInterface]

     assert MyInterface._meta.name == "MyOtherInterface"
     assert MyInterface._meta.description == "Documentation"
+    assert MyInterface._meta.interfaces == [MyFirstInterface]


 def test_generate_interface_with_fields():
@@ -21,6 +21,10 @@ def test_jsonstring_query():
     assert not result.errors
     assert result.data == {"json": json_value}

+    result = schema.execute("""{ json(input: "{}") }""")
+    assert not result.errors
+    assert result.data == {"json": "{}"}
+

 def test_jsonstring_query_variable():
     json_value = '{"key": "value"}'

@@ -31,3 +35,51 @@ def test_jsonstring_query_variable():
     )
     assert not result.errors
     assert result.data == {"json": json_value}
+
+
+def test_jsonstring_optional_uuid_input():
+    """
+    Test that we can provide a null value to an optional input
+    """
+    result = schema.execute("{ json(input: null) }")
+    assert not result.errors
+    assert result.data == {"json": None}
+
+
+def test_jsonstring_invalid_query():
+    """
+    Test that if an invalid type is provided we get an error
+    """
+    result = schema.execute("{ json(input: 1) }")
+    assert result.errors
+    assert len(result.errors) == 1
+    assert result.errors[0].message == "Expected value of type 'JSONString', found 1."
+
+    result = schema.execute("{ json(input: {}) }")
+    assert result.errors
+    assert len(result.errors) == 1
+    assert result.errors[0].message == "Expected value of type 'JSONString', found {}."
+
+    result = schema.execute('{ json(input: "a") }')
+    assert result.errors
+    assert len(result.errors) == 1
+    assert result.errors[0].message == (
+        "Expected value of type 'JSONString', found \"a\"; "
+        "Badly formed JSONString: Expecting value: line 1 column 1 (char 0)"
+    )
+
+    result = schema.execute("""{ json(input: "{\\'key\\': 0}") }""")
+    assert result.errors
+    assert len(result.errors) == 1
+    assert (
+        result.errors[0].message
+        == "Syntax Error: Invalid character escape sequence: '\\''."
+    )
+
+    result = schema.execute("""{ json(input: "{\\"key\\": 0,}") }""")
+    assert result.errors
+    assert len(result.errors) == 1
+    assert result.errors[0].message == (
+        'Expected value of type \'JSONString\', found "{\\"key\\": 0,}"; '
+        "Badly formed JSONString: Expecting property name enclosed in double quotes: line 1 column 11 (char 10)"
+    )
@@ -191,21 +191,15 @@ def test_objecttype_as_container_all_kwargs():


 def test_objecttype_as_container_extra_args():
-    with raises(TypeError) as excinfo:
-        Container("1", "2", "3")
-
-    assert "__init__() takes from 1 to 3 positional arguments but 4 were given" == str(
-        excinfo.value
-    )
+    msg = r"__init__\(\) takes from 1 to 3 positional arguments but 4 were given"
+    with raises(TypeError, match=msg):
+        Container("1", "2", "3")  # type: ignore


 def test_objecttype_as_container_invalid_kwargs():
-    with raises(TypeError) as excinfo:
-        Container(unexisting_field="3")
-
-    assert "__init__() got an unexpected keyword argument 'unexisting_field'" == str(
-        excinfo.value
-    )
+    msg = r"__init__\(\) got an unexpected keyword argument 'unexisting_field'"
+    with raises(TypeError, match=msg):
+        Container(unexisting_field="3")  # type: ignore


 def test_objecttype_container_benchmark(benchmark):
@@ -229,11 +229,11 @@ def test_query_arguments():

     result = test_schema.execute("{ test }", None)
     assert not result.errors
-    assert result.data == {"test": '[null,{"a_str":null,"a_int":null}]'}
+    assert result.data == {"test": "[null,{}]"}

     result = test_schema.execute('{ test(aStr: "String!") }', "Source!")
     assert not result.errors
-    assert result.data == {"test": '["Source!",{"a_str":"String!","a_int":null}]'}
+    assert result.data == {"test": '["Source!",{"a_str":"String!"}]'}

     result = test_schema.execute('{ test(aInt: -123, aStr: "String!") }', "Source!")
     assert not result.errors

@@ -258,7 +258,7 @@ def test_query_input_field():

     result = test_schema.execute("{ test }", None)
     assert not result.errors
-    assert result.data == {"test": '[null,{"a_input":null}]'}
+    assert result.data == {"test": "[null,{}]"}

     result = test_schema.execute('{ test(aInput: {aField: "String!"} ) }', "Source!")
     assert not result.errors
@ -1,4 +1,7 @@
|
|||
from ..scalars import Scalar, Int, BigInt
|
||||
from ..objecttype import ObjectType, Field
|
||||
from ..scalars import Scalar, Int, BigInt, Float, String, Boolean
|
||||
from ..schema import Schema
|
||||
from graphql import Undefined
|
||||
from graphql.language.ast import IntValueNode
|
||||
|
||||
|
||||
|
@@ -11,19 +14,295 @@ def test_scalar():


def test_ints():
    assert Int.parse_value(2 ** 31 - 1) is not None
    assert Int.parse_value("2.0") is not None
    assert Int.parse_value(2 ** 31) is None
    assert Int.parse_value(2**31 - 1) is not Undefined
    assert Int.parse_value("2.0") == 2
    assert Int.parse_value(2**31) is Undefined

    assert Int.parse_literal(IntValueNode(value=str(2 ** 31 - 1))) == 2 ** 31 - 1
    assert Int.parse_literal(IntValueNode(value=str(2 ** 31))) is None
    assert Int.parse_literal(IntValueNode(value=str(2**31 - 1))) == 2**31 - 1
    assert Int.parse_literal(IntValueNode(value=str(2**31))) is Undefined

    assert Int.parse_value(-(2 ** 31)) is not None
    assert Int.parse_value(-(2 ** 31) - 1) is None
    assert Int.parse_value(-(2**31)) is not Undefined
    assert Int.parse_value(-(2**31) - 1) is Undefined

    assert BigInt.parse_value(2 ** 31) is not None
    assert BigInt.parse_value("2.0") is not None
    assert BigInt.parse_value(-(2 ** 31) - 1) is not None
    assert BigInt.parse_value(2**31) is not Undefined
    assert BigInt.parse_value("2.0") == 2
    assert BigInt.parse_value(-(2**31) - 1) is not Undefined

    assert BigInt.parse_literal(IntValueNode(value=str(2 ** 31 - 1))) == 2 ** 31 - 1
    assert BigInt.parse_literal(IntValueNode(value=str(2 ** 31))) == 2 ** 31
    assert BigInt.parse_literal(IntValueNode(value=str(2**31 - 1))) == 2**31 - 1
    assert BigInt.parse_literal(IntValueNode(value=str(2**31))) == 2**31


def return_input(_parent, _info, input):
    return input


class Optional(ObjectType):
    int = Int(input=Int(), resolver=return_input)
    big_int = BigInt(input=BigInt(), resolver=return_input)
    float = Float(input=Float(), resolver=return_input)
    bool = Boolean(input=Boolean(), resolver=return_input)
    string = String(input=String(), resolver=return_input)


class Query(ObjectType):
    optional = Field(Optional)

    def resolve_optional(self, info):
        return Optional()

    def resolve_required(self, info, input):
        return input


schema = Schema(query=Query)


class TestInt:
    def test_query(self):
        """
        Test that a normal query works.
        """
        result = schema.execute("{ optional { int(input: 20) } }")
        assert not result.errors
        assert result.data == {"optional": {"int": 20}}

    def test_optional_input(self):
        """
        Test that we can provide a null value to an optional input
        """
        result = schema.execute("{ optional { int(input: null) } }")
        assert not result.errors
        assert result.data == {"optional": {"int": None}}

    def test_invalid_input(self):
        """
        Test that if an invalid type is provided we get an error
        """
        result = schema.execute('{ optional { int(input: "20") } }')
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message == 'Int cannot represent non-integer value: "20"'
        )

        result = schema.execute('{ optional { int(input: "a") } }')
        assert result.errors
        assert len(result.errors) == 1
        assert result.errors[0].message == 'Int cannot represent non-integer value: "a"'

        result = schema.execute("{ optional { int(input: true) } }")
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message == "Int cannot represent non-integer value: true"
        )


class TestBigInt:
    def test_query(self):
        """
        Test that a normal query works.
        """
        value = 2**31
        result = schema.execute("{ optional { bigInt(input: %s) } }" % value)
        assert not result.errors
        assert result.data == {"optional": {"bigInt": value}}

    def test_optional_input(self):
        """
        Test that we can provide a null value to an optional input
        """
        result = schema.execute("{ optional { bigInt(input: null) } }")
        assert not result.errors
        assert result.data == {"optional": {"bigInt": None}}

    def test_invalid_input(self):
        """
        Test that if an invalid type is provided we get an error
        """
        result = schema.execute('{ optional { bigInt(input: "20") } }')
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message == "Expected value of type 'BigInt', found \"20\"."
        )

        result = schema.execute('{ optional { bigInt(input: "a") } }')
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message == "Expected value of type 'BigInt', found \"a\"."
        )

        result = schema.execute("{ optional { bigInt(input: true) } }")
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message == "Expected value of type 'BigInt', found true."
        )


class TestFloat:
    def test_query(self):
        """
        Test that a normal query works.
        """
        result = schema.execute("{ optional { float(input: 20) } }")
        assert not result.errors
        assert result.data == {"optional": {"float": 20.0}}

        result = schema.execute("{ optional { float(input: 20.2) } }")
        assert not result.errors
        assert result.data == {"optional": {"float": 20.2}}

    def test_optional_input(self):
        """
        Test that we can provide a null value to an optional input
        """
        result = schema.execute("{ optional { float(input: null) } }")
        assert not result.errors
        assert result.data == {"optional": {"float": None}}

    def test_invalid_input(self):
        """
        Test that if an invalid type is provided we get an error
        """
        result = schema.execute('{ optional { float(input: "20") } }')
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message == 'Float cannot represent non numeric value: "20"'
        )

        result = schema.execute('{ optional { float(input: "a") } }')
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message == 'Float cannot represent non numeric value: "a"'
        )

        result = schema.execute("{ optional { float(input: true) } }")
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message == "Float cannot represent non numeric value: true"
        )


class TestBoolean:
    def test_query(self):
        """
        Test that a normal query works.
        """
        result = schema.execute("{ optional { bool(input: true) } }")
        assert not result.errors
        assert result.data == {"optional": {"bool": True}}

        result = schema.execute("{ optional { bool(input: false) } }")
        assert not result.errors
        assert result.data == {"optional": {"bool": False}}

    def test_optional_input(self):
        """
        Test that we can provide a null value to an optional input
        """
        result = schema.execute("{ optional { bool(input: null) } }")
        assert not result.errors
        assert result.data == {"optional": {"bool": None}}

    def test_invalid_input(self):
        """
        Test that if an invalid type is provided we get an error
        """
        result = schema.execute('{ optional { bool(input: "True") } }')
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message
            == 'Boolean cannot represent a non boolean value: "True"'
        )

        result = schema.execute('{ optional { bool(input: "true") } }')
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message
            == 'Boolean cannot represent a non boolean value: "true"'
        )

        result = schema.execute('{ optional { bool(input: "a") } }')
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message
            == 'Boolean cannot represent a non boolean value: "a"'
        )

        result = schema.execute("{ optional { bool(input: 1) } }")
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message
            == "Boolean cannot represent a non boolean value: 1"
        )

        result = schema.execute("{ optional { bool(input: 0) } }")
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message
            == "Boolean cannot represent a non boolean value: 0"
        )


class TestString:
    def test_query(self):
        """
        Test that a normal query works.
        """
        result = schema.execute('{ optional { string(input: "something something") } }')
        assert not result.errors
        assert result.data == {"optional": {"string": "something something"}}

        result = schema.execute('{ optional { string(input: "True") } }')
        assert not result.errors
        assert result.data == {"optional": {"string": "True"}}

        result = schema.execute('{ optional { string(input: "0") } }')
        assert not result.errors
        assert result.data == {"optional": {"string": "0"}}

    def test_optional_input(self):
        """
        Test that we can provide a null value to an optional input
        """
        result = schema.execute("{ optional { string(input: null) } }")
        assert not result.errors
        assert result.data == {"optional": {"string": None}}

    def test_invalid_input(self):
        """
        Test that if an invalid type is provided we get an error
        """
        result = schema.execute("{ optional { string(input: 1) } }")
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message == "String cannot represent a non string value: 1"
        )

        result = schema.execute("{ optional { string(input: 3.2) } }")
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message
            == "String cannot represent a non string value: 3.2"
        )

        result = schema.execute("{ optional { string(input: true) } }")
        assert result.errors
        assert len(result.errors) == 1
        assert (
            result.errors[0].message
            == "String cannot represent a non string value: true"
        )
@@ -1,3 +1,4 @@
from graphql import Undefined
from ..scalars import Boolean, Float, Int, String


@@ -9,12 +10,12 @@ def test_serializes_output_int():
    assert Int.serialize(1.1) == 1
    assert Int.serialize(-1.1) == -1
    assert Int.serialize(1e5) == 100000
    assert Int.serialize(9876504321) is None
    assert Int.serialize(-9876504321) is None
    assert Int.serialize(1e100) is None
    assert Int.serialize(-1e100) is None
    assert Int.serialize(9876504321) is Undefined
    assert Int.serialize(-9876504321) is Undefined
    assert Int.serialize(1e100) is Undefined
    assert Int.serialize(-1e100) is Undefined
    assert Int.serialize("-1.1") == -1
    assert Int.serialize("one") is None
    assert Int.serialize("one") is Undefined
    assert Int.serialize(False) == 0
    assert Int.serialize(True) == 1

@@ -27,7 +28,7 @@ def test_serializes_output_float():
    assert Float.serialize(1.1) == 1.1
    assert Float.serialize(-1.1) == -1.1
    assert Float.serialize("-1.1") == -1.1
    assert Float.serialize("one") is None
    assert Float.serialize("one") is Undefined
    assert Float.serialize(False) == 0
    assert Float.serialize(True) == 1

@@ -38,7 +39,7 @@ def test_serializes_output_string():
    assert String.serialize(-1.1) == "-1.1"
    assert String.serialize(True) == "true"
    assert String.serialize(False) == "false"
    assert String.serialize(u"\U0001F601") == u"\U0001F601"
    assert String.serialize("\U0001F601") == "\U0001F601"


def test_serializes_output_boolean():
@@ -1,7 +1,8 @@
from graphql.type import GraphQLObjectType, GraphQLSchema
from textwrap import dedent

from pytest import raises

from graphene.tests.utils import dedent
from graphql.type import GraphQLObjectType, GraphQLSchema

from ..field import Field
from ..objecttype import ObjectType

@@ -43,8 +44,10 @@ def test_schema_get_type_error():

def test_schema_str():
    schema = Schema(Query)
    assert str(schema) == dedent(
        """
    assert (
        str(schema).strip()
        == dedent(
            """
            type Query {
              inner: MyOtherType
            }

@@ -53,6 +56,7 @@ def test_schema_str():
              field: String
            }
            """
        ).strip()
    )


@@ -14,9 +14,7 @@ class Subscription(ObjectType):
    count_to_ten = Field(Int)

    async def subscribe_count_to_ten(root, info):
        count = 0
        while count < 10:
            count += 1
        for count in range(1, 11):
            yield count
@@ -1,3 +1,4 @@
from graphql import Undefined
from graphql.type import (
    GraphQLArgument,
    GraphQLEnumType,

@@ -244,7 +245,9 @@ def test_objecttype_camelcase():
    foo_field = fields["fooBar"]
    assert isinstance(foo_field, GraphQLField)
    assert foo_field.args == {
        "barFoo": GraphQLArgument(GraphQLString, default_value=None, out_name="bar_foo")
        "barFoo": GraphQLArgument(
            GraphQLString, default_value=Undefined, out_name="bar_foo"
        )
    }


@@ -267,7 +270,7 @@ def test_objecttype_camelcase_disabled():
    assert isinstance(foo_field, GraphQLField)
    assert foo_field.args == {
        "bar_foo": GraphQLArgument(
            GraphQLString, default_value=None, out_name="bar_foo"
            GraphQLString, default_value=Undefined, out_name="bar_foo"
        )
    }

@@ -286,3 +289,33 @@ def test_objecttype_with_possible_types():
    assert graphql_type.is_type_of
    assert graphql_type.is_type_of({}, None) is True
    assert graphql_type.is_type_of(MyObjectType(), None) is False


def test_interface_with_interfaces():
    class FooInterface(Interface):
        foo = String()

    class BarInterface(Interface):
        class Meta:
            interfaces = [FooInterface]

        foo = String()
        bar = String()

    type_map = create_type_map([FooInterface, BarInterface])
    assert "FooInterface" in type_map
    foo_graphql_type = type_map["FooInterface"]
    assert isinstance(foo_graphql_type, GraphQLInterfaceType)
    assert foo_graphql_type.name == "FooInterface"

    assert "BarInterface" in type_map
    bar_graphql_type = type_map["BarInterface"]
    assert isinstance(bar_graphql_type, GraphQLInterfaceType)
    assert bar_graphql_type.name == "BarInterface"

    fields = bar_graphql_type.fields
    assert list(fields) == ["foo", "bar"]
    assert isinstance(fields["foo"], GraphQLField)
    assert isinstance(fields["bar"], GraphQLField)

    assert list(bar_graphql_type.interfaces) == list([foo_graphql_type])
@@ -1,14 +1,19 @@
from ..objecttype import ObjectType
from ..schema import Schema
from ..uuid import UUID
from ..structures import NonNull


class Query(ObjectType):
    uuid = UUID(input=UUID())
    required_uuid = UUID(input=NonNull(UUID), required=True)

    def resolve_uuid(self, info, input):
        return input

    def resolve_required_uuid(self, info, input):
        return input


schema = Schema(query=Query)

@@ -29,3 +34,35 @@ def test_uuidstring_query_variable():
    )
    assert not result.errors
    assert result.data == {"uuid": uuid_value}


def test_uuidstring_optional_uuid_input():
    """
    Test that we can provide a null value to an optional input
    """
    result = schema.execute("{ uuid(input: null) }")
    assert not result.errors
    assert result.data == {"uuid": None}


def test_uuidstring_invalid_query():
    """
    Test that if an invalid type is provided we get an error
    """
    result = schema.execute("{ uuid(input: 1) }")
    assert result.errors
    assert len(result.errors) == 1
    assert result.errors[0].message == "Expected value of type 'UUID', found 1."

    result = schema.execute('{ uuid(input: "a") }')
    assert result.errors
    assert len(result.errors) == 1
    assert (
        result.errors[0].message
        == "Expected value of type 'UUID', found \"a\"; badly formed hexadecimal UUID string"
    )

    result = schema.execute("{ requiredUuid(input: null) }")
    assert result.errors
    assert len(result.errors) == 1
    assert result.errors[0].message == "Expected value of type 'UUID!', found null."
@@ -21,7 +21,7 @@ class Union(UnmountedType, BaseType):
    to determine which type is actually used when the field is resolved.

    The schema in this example can take a search text and return any of the GraphQL object types
    indicated: Human, Droid or Startship.
    indicated: Human, Droid or Starship.

    Ambiguous return types can be resolved on each ObjectType through ``Meta.possible_types``
    attribute or ``is_type_of`` method. Or by implementing ``resolve_type`` class method on the

@@ -2,6 +2,7 @@ from __future__ import absolute_import
from uuid import UUID as _UUID

from graphql.language.ast import StringValueNode
from graphql import Undefined

from .scalars import Scalar

@@ -21,9 +22,10 @@ class UUID(Scalar):
        return str(uuid)

    @staticmethod
    def parse_literal(node):
    def parse_literal(node, _variables=None):
        if isinstance(node, StringValueNode):
            return _UUID(node.value)
        return Undefined

    @staticmethod
    def parse_value(value):
@@ -27,19 +27,18 @@ def import_string(dotted_path, dotted_attributes=None):

    if not dotted_attributes:
        return result
    else:
        attributes = dotted_attributes.split(".")
        traveled_attributes = []
        try:
            for attribute in attributes:
                traveled_attributes.append(attribute)
                result = getattr(result, attribute)
            return result
        except AttributeError:
            raise ImportError(
                'Module "%s" does not define a "%s" attribute inside attribute/class "%s"'
                % (module_path, ".".join(traveled_attributes), class_name)
            )
    attributes = dotted_attributes.split(".")
    traveled_attributes = []
    try:
        for attribute in attributes:
            traveled_attributes.append(attribute)
            result = getattr(result, attribute)
        return result
    except AttributeError:
        raise ImportError(
            'Module "%s" does not define a "%s" attribute inside attribute/class "%s"'
            % (module_path, ".".join(traveled_attributes), class_name)
        )


def lazy_import(dotted_path, dotted_attributes=None):
@@ -94,6 +94,7 @@ TEST_DATA = {
    ],
    "movies": {
        "1198359": {
            "id": "1198359",
            "name": "King Arthur: Legend of the Sword",
            "synopsis": (
                "When the child Arthur's father is murdered, Vortigern, "

@@ -159,7 +160,7 @@ def test_example_end_to_end():
                "date": "2017-05-19",
                "movie": {
                    "__typename": "Movie",
                    "id": "TW92aWU6Tm9uZQ==",
                    "id": "TW92aWU6MTE5ODM1OQ==",
                    "name": "King Arthur: Legend of the Sword",
                    "synopsis": (
                        "When the child Arthur's father is murdered, Vortigern, "

@@ -172,7 +173,7 @@ def test_example_end_to_end():
                "__typename": "Event",
                "id": "RXZlbnQ6MjM0",
                "date": "2017-05-20",
                "movie": {"__typename": "Movie", "id": "TW92aWU6Tm9uZQ=="},
                "movie": {"__typename": "Movie", "id": "TW92aWU6MTE5ODM1OQ=="},
            },
        ]
    }

@@ -38,4 +38,4 @@ def test_orderedtype_non_orderabletypes():

    assert one.__lt__(1) == NotImplemented
    assert one.__gt__(1) == NotImplemented
    assert not one == 1
    assert one != 1

@@ -18,14 +18,12 @@ schema = Schema(query=Query)
def run_query(query: str):
    document = parse(query)

    errors = validate(
    return validate(
        schema=schema.graphql_schema,
        document_ast=document,
        rules=(DisableIntrospection,),
    )

    return errors


def test_disallows_introspection_queries():
    errors = run_query("{ __schema { queryType { name } } }")
29 setup.py
@@ -45,20 +45,21 @@ class PyTest(TestCommand):


tests_require = [
    "pytest>=5.3,<6",
    "pytest-benchmark>=3.2,<4",
    "pytest-cov>=2.8,<3",
    "pytest-mock>=2,<3",
    "pytest-asyncio>=0.10,<2",
    "snapshottest>=0.5,<1",
    "coveralls>=1.11,<2",
    "pytest>=6,<7",
    "pytest-benchmark>=3.4,<4",
    "pytest-cov>=3,<4",
    "pytest-mock>=3,<4",
    "pytest-asyncio>=0.16,<2",
    "snapshottest>=0.6,<1",
    "coveralls>=3.3,<4",
    "promise>=2.3,<3",
    "mock>=4.0,<5",
    "pytz==2021.1",
    "iso8601>=0.1,<2",
    "aiodataloader<1",
    "mock>=4,<5",
    "pytz==2022.1",
    "iso8601>=1,<2",
]

dev_requires = ["black==19.10b0", "flake8>=3.7,<4"] + tests_require
dev_requires = ["black==22.3.0", "flake8>=4,<5"] + tests_require

setup(
    name="graphene",

@@ -78,12 +79,14 @@ setup(
        "Programming Language :: Python :: 3.6",
        "Programming Language :: Python :: 3.7",
        "Programming Language :: Python :: 3.8",
        "Programming Language :: Python :: 3.9",
        "Programming Language :: Python :: 3.10",
    ],
    keywords="api graphql protocol rest relay graphene",
    packages=find_packages(exclude=["examples*"]),
    install_requires=[
        "graphql-core>=3.1.2,<4",
        "graphql-relay>=3.0,<4",
        "graphql-core>=3.1,<3.3",
        "graphql-relay>=3.1,<3.3",
        "aniso8601>=8,<10",
    ],
    tests_require=tests_require,
79 tests_asyncio/test_dataloader.py Normal file
@@ -0,0 +1,79 @@
from collections import namedtuple
from unittest.mock import Mock
from pytest import mark
from aiodataloader import DataLoader

from graphene import ObjectType, String, Schema, Field, List


CHARACTERS = {
    "1": {"name": "Luke Skywalker", "sibling": "3"},
    "2": {"name": "Darth Vader", "sibling": None},
    "3": {"name": "Leia Organa", "sibling": "1"},
}


get_character = Mock(side_effect=lambda character_id: CHARACTERS[character_id])


class CharacterType(ObjectType):
    name = String()
    sibling = Field(lambda: CharacterType)

    async def resolve_sibling(character, info):
        if character["sibling"]:
            return await info.context.character_loader.load(character["sibling"])
        return None


class Query(ObjectType):
    skywalker_family = List(CharacterType)

    async def resolve_skywalker_family(_, info):
        return await info.context.character_loader.load_many(["1", "2", "3"])


mock_batch_load_fn = Mock(
    side_effect=lambda character_ids: [get_character(id) for id in character_ids]
)


class CharacterLoader(DataLoader):
    async def batch_load_fn(self, character_ids):
        return mock_batch_load_fn(character_ids)


Context = namedtuple("Context", "character_loader")


@mark.asyncio
async def test_basic_dataloader():
    schema = Schema(query=Query)

    character_loader = CharacterLoader()
    context = Context(character_loader=character_loader)

    query = """
    {
        skywalkerFamily {
            name
            sibling {
                name
            }
        }
    }
    """

    result = await schema.execute_async(query, context=context)

    assert not result.errors
    assert result.data == {
        "skywalkerFamily": [
            {"name": "Luke Skywalker", "sibling": {"name": "Leia Organa"}},
            {"name": "Darth Vader", "sibling": None},
            {"name": "Leia Organa", "sibling": {"name": "Luke Skywalker"}},
        ]
    }

    assert mock_batch_load_fn.call_count == 1
    assert get_character.call_count == 3
16 tox.ini
@@ -1,5 +1,5 @@
[tox]
envlist = flake8,py36,py37,py38,pre-commit,mypy
envlist = py3{6,7,8,9,10}, flake8, mypy, pre-commit
skipsdist = true

[testenv]

@@ -8,28 +8,28 @@ deps =
setenv =
    PYTHONPATH = .:{envdir}
commands =
    py{36,37,38}: pytest --cov=graphene graphene examples {posargs}
    py{36,37,38,39,310}: pytest --cov=graphene graphene --cov-report=term --cov-report=xml examples {posargs}

[testenv:pre-commit]
basepython=python3.8
basepython = python3.9
deps =
    pre-commit>=2,<3
    pre-commit>=2.16,<3
setenv =
    LC_CTYPE=en_US.UTF-8
commands =
    pre-commit run --all-files --show-diff-on-failure

[testenv:mypy]
basepython=python3.8
basepython = python3.9
deps =
    mypy>=0.761,<1
    mypy>=0.950,<1
commands =
    mypy graphene

[testenv:flake8]
basepython=python3.8
basepython = python3.9
deps =
    flake8>=3.8,<4
    flake8>=4,<5
commands =
    pip install --pre -e .
    flake8 graphene