Compare commits

master...v3.2.2

No commits in common. "master" and "v3.2.2" have entirely different histories.

69 changed files with 2176 additions and 717 deletions


@ -1,21 +0,0 @@
name: 📦 Build
on: [push, pull_request]
jobs:
build:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- name: Set up Python 3.10
uses: actions/setup-python@v5
with:
python-version: "3.10"
- name: Install dependencies
run: |
python -m pip install --upgrade pip
pip install build twine
- name: Building package
run: python3 -m build
- name: Check package with Twine
run: twine check dist/*


@ -10,9 +10,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Set up Python 3.10
uses: actions/setup-python@v5
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Build wheel and source tarball


@ -7,9 +7,9 @@ jobs:
runs-on: ubuntu-latest
steps:
- uses: actions/checkout@v4
- uses: actions/checkout@v3
- name: Set up Python 3.10
uses: actions/setup-python@v5
uses: actions/setup-python@v4
with:
python-version: "3.10"
- name: Install dependencies


@ -25,15 +25,15 @@ jobs:
fail-fast: false
matrix:
include:
- {name: '3.13', python: '3.13', os: ubuntu-latest, tox: py313}
- {name: '3.12', python: '3.12', os: ubuntu-latest, tox: py312}
- {name: '3.11', python: '3.11', os: ubuntu-latest, tox: py311}
- {name: '3.11', python: '3.11-dev', os: ubuntu-latest, tox: py311}
- {name: '3.10', python: '3.10', os: ubuntu-latest, tox: py310}
- {name: '3.9', python: '3.9', os: ubuntu-latest, tox: py39}
- {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38}
- {name: '3.7', python: '3.7', os: ubuntu-latest, tox: py37}
- {name: '3.6', python: '3.6', os: ubuntu-20.04, tox: py36}
steps:
- uses: actions/checkout@v4
- uses: actions/setup-python@v5
- uses: actions/checkout@v3
- uses: actions/setup-python@v4
with:
python-version: ${{ matrix.python }}
@ -44,21 +44,23 @@ jobs:
- name: get pip cache dir
id: pip-cache
run: echo "dir=$(pip cache dir)" >> $GITHUB_OUTPUT
run: echo "::set-output name=dir::$(pip cache dir)"
- name: cache pip dependencies
uses: actions/cache@v3
with:
path: ${{ steps.pip-cache.outputs.dir }}
key: pip|${{ runner.os }}|${{ matrix.python }}|${{ hashFiles('setup.py') }}
- run: pip install tox
- run: tox -e ${{ matrix.tox }}
- name: Upload coverage.xml
if: ${{ matrix.python == '3.10' }}
uses: actions/upload-artifact@v4
uses: actions/upload-artifact@v3
with:
name: graphene-coverage
path: coverage.xml
if-no-files-found: error
- name: Upload coverage.xml to codecov
if: ${{ matrix.python == '3.10' }}
uses: codecov/codecov-action@v4
uses: codecov/codecov-action@v3

.gitignore (vendored): 1 line changed

@ -90,4 +90,3 @@ venv/
*.sqlite3
.vscode
.mypy_cache
.ruff_cache

.isort.cfg (new file): 2 lines added

@ -0,0 +1,2 @@
[settings]
known_third_party = aniso8601,graphql,graphql_relay,promise,pytest,pytz,pyutils,setuptools,snapshottest,sphinx_graphene_theme


@ -20,10 +20,11 @@ repos:
rev: v2.37.3
hooks:
- id: pyupgrade
- repo: https://github.com/astral-sh/ruff-pre-commit
# Ruff version.
rev: v0.5.0
- repo: https://github.com/psf/black
rev: 22.6.0
hooks:
- id: ruff
- id: ruff-format
args: [ --check ]
- id: black
- repo: https://github.com/PyCQA/flake8
rev: 5.0.4
hooks:
- id: flake8


@ -1,4 +1,4 @@
# ![Graphene Logo](http://graphene-python.org/favicon.png) [Graphene](http://graphene-python.org) [![PyPI version](https://badge.fury.io/py/graphene.svg)](https://badge.fury.io/py/graphene) [![Coverage Status](https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github)](https://coveralls.io/github/graphql-python/graphene?branch=master) [![](https://dcbadge.vercel.app/api/server/T6Gp6NFYHe?style=flat)](https://discord.gg/T6Gp6NFYHe)
# ![Graphene Logo](http://graphene-python.org/favicon.png) [Graphene](http://graphene-python.org) [![Build Status](https://travis-ci.org/graphql-python/graphene.svg?branch=master)](https://travis-ci.org/graphql-python/graphene) [![PyPI version](https://badge.fury.io/py/graphene.svg)](https://badge.fury.io/py/graphene) [![Coverage Status](https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github)](https://coveralls.io/github/graphql-python/graphene?branch=master) [![](https://dcbadge.vercel.app/api/server/T6Gp6NFYHe?style=flat)](https://discord.gg/T6Gp6NFYHe)
[💬 Join the community on Discord](https://discord.gg/T6Gp6NFYHe)

README.rst (new file): 171 lines added

@ -0,0 +1,171 @@
|Graphene Logo| `Graphene <http://graphene-python.org>`__ |Build Status| |PyPI version| |Coverage Status|
=========================================================================================================
`💬 Join the community on
Slack <https://join.slack.com/t/graphenetools/shared_invite/enQtOTE2MDQ1NTg4MDM1LTA4Nzk0MGU0NGEwNzUxZGNjNDQ4ZjAwNDJjMjY0OGE1ZDgxZTg4YjM2ZTc4MjE2ZTAzZjE2ZThhZTQzZTkyMmM>`__
**We are looking for contributors**! Please check the
`ROADMAP <https://github.com/graphql-python/graphene/blob/master/ROADMAP.md>`__
to see how you can help ❤️
Introduction
------------
`Graphene <http://graphene-python.org>`__ is an opinionated Python
library for building GraphQL schemas/types fast and easily.
- **Easy to use:** Graphene helps you use GraphQL in Python without
effort.
- **Relay:** Graphene has builtin support for Relay.
- **Data agnostic:** Graphene supports any kind of data source: SQL
(Django, SQLAlchemy), NoSQL, custom Python objects, etc. We believe
that by providing a complete API you could plug Graphene anywhere
your data lives and make your data available through GraphQL.
Integrations
------------
Graphene has multiple integrations with different frameworks:
=============  =================================================================================
Integration    Package
=============  =================================================================================
Django         `graphene-django <https://github.com/graphql-python/graphene-django/>`__
SQLAlchemy     `graphene-sqlalchemy <https://github.com/graphql-python/graphene-sqlalchemy/>`__
=============  =================================================================================
Also, Graphene is fully compatible with the GraphQL spec, working
seamlessly with all GraphQL clients, such as
`Relay <https://github.com/facebook/relay>`__,
`Apollo <https://github.com/apollographql/apollo-client>`__ and
`gql <https://github.com/graphql-python/gql>`__.
Installation
------------
To install `graphene`, just run this command in your shell
.. code:: bash
pip install "graphene>=3.0"
Examples
--------
Here is one example for you to get started:
.. code:: python
import graphene
class Query(graphene.ObjectType):
hello = graphene.String(description='A typical hello world')
def resolve_hello(self, info):
return 'World'
schema = graphene.Schema(query=Query)
Then querying ``graphene.Schema`` is as simple as:
.. code:: python
query = '''
query SayHello {
hello
}
'''
result = schema.execute(query)
If you want to learn even more, you can also check the following
`examples <examples/>`__:
- **Basic Schema**: `Starwars example <examples/starwars>`__
- **Relay Schema**: `Starwars Relay
example <examples/starwars_relay>`__
Documentation
-------------
Documentation and links to additional resources are available at
https://docs.graphene-python.org/en/latest/
Contributing
------------
After cloning this repo, create a
`virtualenv <https://virtualenv.pypa.io/en/stable/>`__ and ensure
dependencies are installed by running:
.. code:: sh
virtualenv venv
source venv/bin/activate
pip install -e ".[test]"
Well-written tests and maintaining good test coverage are important to
this project. While developing, run new and existing tests with:
.. code:: sh
py.test graphene/relay/tests/test_node.py # Single file
py.test graphene/relay # All tests in directory
Add the ``-s`` flag if you have introduced breakpoints into the code for
debugging. Add the ``-v`` (“verbose”) flag to get more detailed test
output. For even more detailed output, use ``-vv``. Check out the
`pytest documentation <https://docs.pytest.org/en/latest/>`__ for more
options and test running controls.
You can also run the benchmarks with:
.. code:: sh
py.test graphene --benchmark-only
Graphene supports several versions of Python. To make sure that changes
do not break compatibility with any of those versions, we use ``tox`` to
create virtualenvs for each Python version and run tests with that
version. To run against all Python versions defined in the ``tox.ini``
config file, just run:
.. code:: sh
tox
If you wish to run against a specific version defined in the ``tox.ini``
file:
.. code:: sh
tox -e py36
Tox can only use whatever versions of Python are installed on your
system. When you create a pull request, Travis will also be running the
same tests and report the results, so there is no need for potential
contributors to try to install every single version of Python on their
own system ahead of time. We appreciate opening issues and pull requests
to make graphene even more stable & useful!
Building Documentation
~~~~~~~~~~~~~~~~~~~~~~
The documentation is generated using the excellent
`Sphinx <http://www.sphinx-doc.org/>`__ and a custom theme.
An HTML version of the documentation is produced by running:
.. code:: sh
make docs
.. |Graphene Logo| image:: http://graphene-python.org/favicon.png
.. |Build Status| image:: https://travis-ci.org/graphql-python/graphene.svg?branch=master
:target: https://travis-ci.org/graphql-python/graphene
.. |PyPI version| image:: https://badge.fury.io/py/graphene.svg
:target: https://badge.fury.io/py/graphene
.. |Coverage Status| image:: https://coveralls.io/repos/graphql-python/graphene/badge.svg?branch=master&service=github
:target: https://coveralls.io/github/graphql-python/graphene?branch=master

ROADMAP.md (new file): 54 lines added

@ -0,0 +1,54 @@
# GraphQL Python Roadmap
In order to move Graphene and the GraphQL Python ecosystem forward it's essential to be clear with the community on next steps, so we can move uniformly.
_👋 If you have more ideas on how to move the Graphene ecosystem forward, don't hesitate to [open a PR](https://github.com/graphql-python/graphene/edit/master/ROADMAP.md)_
## Now
- [ ] Continue to support v2.x with security releases
- [ ] Last major/feature release is cut and graphene-* libraries should pin to that version number
## Next
New features will only be developed on version 3 of ecosystem libraries.
### [Core-Next](https://github.com/graphql-python/graphql-core-next)
Targeted as v3 of [graphql-core](https://pypi.org/project/graphql-core/), Python 3 only
### Graphene
- [ ] Integrate with the core-next API and resolve all breaking changes
- [ ] GraphQL types from type annotations - [See issue](https://github.com/graphql-python/graphene/issues/729)
- [ ] Add support for coroutines in Connection, Mutation (abstracting out Promise requirement) - [See PR](https://github.com/graphql-python/graphene/pull/824)
### Graphene-*
- [ ] Integrate with the graphene core-next API and resolve all breaking changes
### *-graphql
- [ ] Integrate with the graphql core-next API and resolve all breaking changes
## Ongoing Initiatives
- [ ] Improve documentation, especially for new users to the library
- [ ] Recipes for “quick start” that people can ideally use/run
## Dependent Libraries
| Repo | Release Manager | CODEOWNERS | Pinned | next/master created | Labels Standardized |
| ---------------------------------------------------------------------------- | --------------- | ---------- | ---------- | ------------------- | ------------------- |
| [graphene](https://github.com/graphql-python/graphene) | ekampf | ✅ | | ✅ | |
| [graphql-core](https://github.com/graphql-python/graphql-core) | Cito | ✅ | N/A | N/A | |
| [graphql-core-next](https://github.com/graphql-python/graphql-core-next) | Cito | ✅ | N/A | N/A | |
| [graphql-server-core](https://github.com/graphql-python/graphql-server-core) | Cito | | ✅ | ✅ | |
| [gql](https://github.com/graphql-python/gql) | ekampf | | | | |
| [gql-next](https://github.com/graphql-python/gql-next) | ekampf | | N/A | N/A | |
| ...[aiohttp](https://github.com/graphql-python/aiohttp-graphql) | | | | | |
| ...[django](https://github.com/graphql-python/graphene-django) | mvanlonden | | ✅ | ✅ | |
| ...[sanic](https://github.com/graphql-python/sanic-graphql) | ekampf | | | | |
| ...[flask](https://github.com/graphql-python/flask-graphql) | | | | | |
| ...[webob](https://github.com/graphql-python/webob-graphql) | | | | | |
| ...[tornado](https://github.com/graphql-python/graphene-tornado) | ewhauser | | PR created | ✅ | |
| ...[ws](https://github.com/graphql-python/graphql-ws) | Cito/dfee | | ✅ | ✅ | |
| ...[gae](https://github.com/graphql-python/graphene-gae) | ekampf | | PR created | ✅ | |
| ...[sqlalchemy](https://github.com/graphql-python/graphene-sqlalchemy) | jnak/Nabell | ✅ | ✅ | ✅ | |
| ...[mongo](https://github.com/graphql-python/graphene-mongo) | | | ✅ | ✅ | |
| ...[relay-py](https://github.com/graphql-python/graphql-relay-py) | Cito | | | | |
| ...[wsgi](https://github.com/moritzmhmk/wsgi-graphql) | | | | | |


@ -1,15 +0,0 @@
# Security Policy
## Supported Versions
Support for security issues is currently provided for Graphene 3.0 and above. Support on earlier versions cannot be guaranteed by the maintainers of this library, but community PRs may be accepted in critical cases.
The preferred mitigation strategy is via an upgrade to Graphene 3.
| Version | Supported |
| ------- | ------------------ |
| 3.x | :white_check_mark: |
| <3.x | :x: |
## Reporting a Vulnerability
Please use responsible disclosure by contacting a core maintainer via Discord or E-Mail.

bin/autolinter (new executable file): 7 lines added

@ -0,0 +1,7 @@
#!/bin/bash
# Install the required scripts with
# pip install autoflake autopep8 isort
autoflake ./examples/ ./graphene/ -r --remove-unused-variables --remove-all-unused-imports --in-place
autopep8 ./examples/ ./graphene/ -r --in-place --experimental --aggressive --max-line-length 120
isort -rc ./examples/ ./graphene/


@ -1,5 +1,4 @@
import os
import sys
import sphinx_graphene_theme
@ -23,6 +22,8 @@ on_rtd = os.environ.get("READTHEDOCS", None) == "True"
# add these directories to sys.path here. If the directory is relative to the
# documentation root, use os.path.abspath to make it absolute, like shown here.
#
import os
import sys
sys.path.insert(0, os.path.abspath(".."))


@ -59,7 +59,7 @@ When we send a **Query** requesting only one **Field**, ``hello``, and specify a
Requirements
~~~~~~~~~~~~
- Python (3.8, 3.9, 3.10, 3.11, 3.12, pypy)
- Python (3.6, 3.7, 3.8, 3.9, 3.10, pypy)
- Graphene (3.0)
Project setup


@ -69,3 +69,43 @@ You can also add extra keyword arguments to the ``execute`` method, such as
'hey': 'hello Peter!'
}
}
Snapshot testing
~~~~~~~~~~~~~~~~
As our APIs evolve, we need to know when our changes introduce any breaking changes that might break
some of the clients of our GraphQL app.
However, writing tests and replicating the same response we expect from our GraphQL application can be a
tedious and repetitive task, and sometimes it's easier to skip this process.
Because of that, we recommend the usage of `SnapshotTest <https://github.com/syrusakbary/snapshottest/>`_.
SnapshotTest lets us write all these tests in a breeze, as it automatically creates the ``snapshots`` for us
the first time the tests are executed.
Here is a simple example of how our tests will look if we use ``pytest``:
.. code:: python
def test_hey(snapshot):
client = Client(my_schema)
# This will create a snapshot dir and a snapshot file
# the first time the test is executed, with the response
# of the execution.
snapshot.assert_match(client.execute('''{ hey }'''))
If we are using ``unittest``:
.. code:: python
from snapshottest import TestCase
class APITestCase(TestCase):
def test_api_me(self):
"""Testing the API for /me"""
client = Client(my_schema)
self.assertMatchSnapshot(client.execute('''{ hey }'''))


@ -80,10 +80,6 @@ If we have a schema with Person type and one field on the root query.
from graphene import ObjectType, String, Field
def get_human(name):
first_name, last_name = name.split()
return Person(first_name, last_name)
class Person(ObjectType):
full_name = String()


@ -8,6 +8,7 @@ class Patron(graphene.ObjectType):
class Query(graphene.ObjectType):
patron = graphene.Field(Patron)
def resolve_patron(root, info):


@ -0,0 +1,100 @@
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import Snapshot
snapshots = Snapshot()
snapshots["test_hero_name_query 1"] = {"data": {"hero": {"name": "R2-D2"}}}
snapshots["test_hero_name_and_friends_query 1"] = {
"data": {
"hero": {
"id": "2001",
"name": "R2-D2",
"friends": [
{"name": "Luke Skywalker"},
{"name": "Han Solo"},
{"name": "Leia Organa"},
],
}
}
}
snapshots["test_nested_query 1"] = {
"data": {
"hero": {
"name": "R2-D2",
"friends": [
{
"name": "Luke Skywalker",
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
"friends": [
{"name": "Han Solo"},
{"name": "Leia Organa"},
{"name": "C-3PO"},
{"name": "R2-D2"},
],
},
{
"name": "Han Solo",
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
"friends": [
{"name": "Luke Skywalker"},
{"name": "Leia Organa"},
{"name": "R2-D2"},
],
},
{
"name": "Leia Organa",
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
"friends": [
{"name": "Luke Skywalker"},
{"name": "Han Solo"},
{"name": "C-3PO"},
{"name": "R2-D2"},
],
},
],
}
}
}
snapshots["test_fetch_luke_query 1"] = {"data": {"human": {"name": "Luke Skywalker"}}}
snapshots["test_fetch_some_id_query 1"] = {
"data": {"human": {"name": "Luke Skywalker"}}
}
snapshots["test_fetch_some_id_query2 1"] = {"data": {"human": {"name": "Han Solo"}}}
snapshots["test_invalid_id_query 1"] = {"data": {"human": None}}
snapshots["test_fetch_luke_aliased 1"] = {"data": {"luke": {"name": "Luke Skywalker"}}}
snapshots["test_fetch_luke_and_leia_aliased 1"] = {
"data": {"luke": {"name": "Luke Skywalker"}, "leia": {"name": "Leia Organa"}}
}
snapshots["test_duplicate_fields 1"] = {
"data": {
"luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"},
"leia": {"name": "Leia Organa", "homePlanet": "Alderaan"},
}
}
snapshots["test_use_fragment 1"] = {
"data": {
"luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"},
"leia": {"name": "Leia Organa", "homePlanet": "Alderaan"},
}
}
snapshots["test_check_type_of_r2 1"] = {
"data": {"hero": {"__typename": "Droid", "name": "R2-D2"}}
}
snapshots["test_check_type_of_luke 1"] = {
"data": {"hero": {"__typename": "Human", "name": "Luke Skywalker"}}
}


@ -8,19 +8,19 @@ setup()
client = Client(schema)
def test_hero_name_query():
result = client.execute("""
def test_hero_name_query(snapshot):
query = """
query HeroNameQuery {
hero {
name
}
}
""")
assert result == {"data": {"hero": {"name": "R2-D2"}}}
"""
snapshot.assert_match(client.execute(query))
def test_hero_name_and_friends_query():
result = client.execute("""
def test_hero_name_and_friends_query(snapshot):
query = """
query HeroNameAndFriendsQuery {
hero {
id
@ -30,24 +30,12 @@ def test_hero_name_and_friends_query():
}
}
}
""")
assert result == {
"data": {
"hero": {
"id": "2001",
"name": "R2-D2",
"friends": [
{"name": "Luke Skywalker"},
{"name": "Han Solo"},
{"name": "Leia Organa"},
],
}
}
}
"""
snapshot.assert_match(client.execute(query))
def test_nested_query():
result = client.execute("""
def test_nested_query(snapshot):
query = """
query NestedQuery {
hero {
name
@ -60,113 +48,70 @@ def test_nested_query():
}
}
}
""")
assert result == {
"data": {
"hero": {
"name": "R2-D2",
"friends": [
{
"name": "Luke Skywalker",
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
"friends": [
{"name": "Han Solo"},
{"name": "Leia Organa"},
{"name": "C-3PO"},
{"name": "R2-D2"},
],
},
{
"name": "Han Solo",
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
"friends": [
{"name": "Luke Skywalker"},
{"name": "Leia Organa"},
{"name": "R2-D2"},
],
},
{
"name": "Leia Organa",
"appearsIn": ["NEWHOPE", "EMPIRE", "JEDI"],
"friends": [
{"name": "Luke Skywalker"},
{"name": "Han Solo"},
{"name": "C-3PO"},
{"name": "R2-D2"},
],
},
],
}
}
}
"""
snapshot.assert_match(client.execute(query))
def test_fetch_luke_query():
result = client.execute("""
def test_fetch_luke_query(snapshot):
query = """
query FetchLukeQuery {
human(id: "1000") {
name
}
}
""")
assert result == {"data": {"human": {"name": "Luke Skywalker"}}}
"""
snapshot.assert_match(client.execute(query))
def test_fetch_some_id_query():
result = client.execute(
"""
def test_fetch_some_id_query(snapshot):
query = """
query FetchSomeIDQuery($someId: String!) {
human(id: $someId) {
name
}
}
""",
variables={"someId": "1000"},
)
assert result == {"data": {"human": {"name": "Luke Skywalker"}}}
"""
params = {"someId": "1000"}
snapshot.assert_match(client.execute(query, variables=params))
def test_fetch_some_id_query2():
result = client.execute(
"""
def test_fetch_some_id_query2(snapshot):
query = """
query FetchSomeIDQuery($someId: String!) {
human(id: $someId) {
name
}
}
""",
variables={"someId": "1002"},
)
assert result == {"data": {"human": {"name": "Han Solo"}}}
"""
params = {"someId": "1002"}
snapshot.assert_match(client.execute(query, variables=params))
def test_invalid_id_query():
result = client.execute(
"""
def test_invalid_id_query(snapshot):
query = """
query humanQuery($id: String!) {
human(id: $id) {
name
}
}
""",
variables={"id": "not a valid id"},
)
assert result == {"data": {"human": None}}
"""
params = {"id": "not a valid id"}
snapshot.assert_match(client.execute(query, variables=params))
def test_fetch_luke_aliased():
result = client.execute("""
def test_fetch_luke_aliased(snapshot):
query = """
query FetchLukeAliased {
luke: human(id: "1000") {
name
}
}
""")
assert result == {"data": {"luke": {"name": "Luke Skywalker"}}}
"""
snapshot.assert_match(client.execute(query))
def test_fetch_luke_and_leia_aliased():
result = client.execute("""
def test_fetch_luke_and_leia_aliased(snapshot):
query = """
query FetchLukeAndLeiaAliased {
luke: human(id: "1000") {
name
@ -175,14 +120,12 @@ def test_fetch_luke_and_leia_aliased():
name
}
}
""")
assert result == {
"data": {"luke": {"name": "Luke Skywalker"}, "leia": {"name": "Leia Organa"}}
}
"""
snapshot.assert_match(client.execute(query))
def test_duplicate_fields():
result = client.execute("""
def test_duplicate_fields(snapshot):
query = """
query DuplicateFields {
luke: human(id: "1000") {
name
@ -193,17 +136,12 @@ def test_duplicate_fields():
homePlanet
}
}
""")
assert result == {
"data": {
"luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"},
"leia": {"name": "Leia Organa", "homePlanet": "Alderaan"},
}
}
"""
snapshot.assert_match(client.execute(query))
def test_use_fragment():
result = client.execute("""
def test_use_fragment(snapshot):
query = """
query UseFragment {
luke: human(id: "1000") {
...HumanFragment
@ -216,36 +154,29 @@ def test_use_fragment():
name
homePlanet
}
""")
assert result == {
"data": {
"luke": {"name": "Luke Skywalker", "homePlanet": "Tatooine"},
"leia": {"name": "Leia Organa", "homePlanet": "Alderaan"},
}
}
"""
snapshot.assert_match(client.execute(query))
def test_check_type_of_r2():
result = client.execute("""
def test_check_type_of_r2(snapshot):
query = """
query CheckTypeOfR2 {
hero {
__typename
name
}
}
""")
assert result == {"data": {"hero": {"__typename": "Droid", "name": "R2-D2"}}}
"""
snapshot.assert_match(client.execute(query))
def test_check_type_of_luke():
result = client.execute("""
def test_check_type_of_luke(snapshot):
query = """
query CheckTypeOfLuke {
hero(episode: EMPIRE) {
__typename
name
}
}
""")
assert result == {
"data": {"hero": {"__typename": "Human", "name": "Luke Skywalker"}}
}
"""
snapshot.assert_match(client.execute(query))


@ -0,0 +1,26 @@
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import Snapshot
snapshots = Snapshot()
snapshots["test_correct_fetch_first_ship_rebels 1"] = {
"data": {
"rebels": {
"name": "Alliance to Restore the Republic",
"ships": {
"pageInfo": {
"startCursor": "YXJyYXljb25uZWN0aW9uOjA=",
"endCursor": "YXJyYXljb25uZWN0aW9uOjA=",
"hasNextPage": True,
"hasPreviousPage": False,
},
"edges": [
{"cursor": "YXJyYXljb25uZWN0aW9uOjA=", "node": {"name": "X-Wing"}}
],
},
}
}
}


@ -0,0 +1,28 @@
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import Snapshot
snapshots = Snapshot()
snapshots["test_mutations 1"] = {
"data": {
"introduceShip": {
"ship": {"id": "U2hpcDo5", "name": "Peter"},
"faction": {
"name": "Alliance to Restore the Republic",
"ships": {
"edges": [
{"node": {"id": "U2hpcDox", "name": "X-Wing"}},
{"node": {"id": "U2hpcDoy", "name": "Y-Wing"}},
{"node": {"id": "U2hpcDoz", "name": "A-Wing"}},
{"node": {"id": "U2hpcDo0", "name": "Millennium Falcon"}},
{"node": {"id": "U2hpcDo1", "name": "Home One"}},
{"node": {"id": "U2hpcDo5", "name": "Peter"}},
]
},
},
}
}
}


@ -0,0 +1,118 @@
# -*- coding: utf-8 -*-
# snapshottest: v1 - https://goo.gl/zC4yUc
from __future__ import unicode_literals
from snapshottest import Snapshot
snapshots = Snapshot()
snapshots["test_correctly_fetches_id_name_rebels 1"] = {
"data": {
"rebels": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}
}
}
snapshots["test_correctly_refetches_rebels 1"] = {
"data": {"node": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}}
}
snapshots["test_correctly_fetches_id_name_empire 1"] = {
"data": {"empire": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
}
snapshots["test_correctly_refetches_empire 1"] = {
"data": {"node": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
}
snapshots["test_correctly_refetches_xwing 1"] = {
"data": {"node": {"id": "U2hpcDox", "name": "X-Wing"}}
}
snapshots[
"test_str_schema 1"
] = '''type Query {
rebels: Faction
empire: Faction
node(
"""The ID of the object"""
id: ID!
): Node
}
"""A faction in the Star Wars saga"""
type Faction implements Node {
"""The ID of the object"""
id: ID!
"""The name of the faction."""
name: String
"""The ships used by the faction."""
ships(before: String, after: String, first: Int, last: Int): ShipConnection
}
"""An object with an ID"""
interface Node {
"""The ID of the object"""
id: ID!
}
type ShipConnection {
"""Pagination data for this connection."""
pageInfo: PageInfo!
"""Contains the nodes in this connection."""
edges: [ShipEdge]!
}
"""
The Relay compliant `PageInfo` type, containing data necessary to paginate this connection.
"""
type PageInfo {
"""When paginating forwards, are there more items?"""
hasNextPage: Boolean!
"""When paginating backwards, are there more items?"""
hasPreviousPage: Boolean!
"""When paginating backwards, the cursor to continue."""
startCursor: String
"""When paginating forwards, the cursor to continue."""
endCursor: String
}
"""A Relay edge containing a `Ship` and its cursor."""
type ShipEdge {
"""The item at the end of the edge"""
node: Ship
"""A cursor for use in pagination"""
cursor: String!
}
"""A ship in the Star Wars saga"""
type Ship implements Node {
"""The ID of the object"""
id: ID!
"""The name of the ship."""
name: String
}
type Mutation {
introduceShip(input: IntroduceShipInput!): IntroduceShipPayload
}
type IntroduceShipPayload {
ship: Ship
faction: Faction
clientMutationId: String
}
input IntroduceShipInput {
shipName: String!
factionId: String!
clientMutationId: String
}'''


@ -8,46 +8,26 @@ setup()
client = Client(schema)
def test_correct_fetch_first_ship_rebels():
result = client.execute("""
query RebelsShipsQuery {
rebels {
name,
ships(first: 1) {
pageInfo {
startCursor
endCursor
hasNextPage
hasPreviousPage
}
edges {
cursor
node {
name
}
}
def test_correct_fetch_first_ship_rebels(snapshot):
query = """
query RebelsShipsQuery {
rebels {
name,
ships(first: 1) {
pageInfo {
startCursor
endCursor
hasNextPage
hasPreviousPage
}
edges {
cursor
node {
name
}
}
}
""")
assert result == {
"data": {
"rebels": {
"name": "Alliance to Restore the Republic",
"ships": {
"pageInfo": {
"startCursor": "YXJyYXljb25uZWN0aW9uOjA=",
"endCursor": "YXJyYXljb25uZWN0aW9uOjA=",
"hasNextPage": True,
"hasPreviousPage": False,
},
"edges": [
{
"cursor": "YXJyYXljb25uZWN0aW9uOjA=",
"node": {"name": "X-Wing"},
}
],
},
}
}
}
}
"""
snapshot.assert_match(client.execute(query))


@ -8,45 +8,26 @@ setup()
client = Client(schema)
def test_mutations():
result = client.execute("""
mutation MyMutation {
introduceShip(input:{clientMutationId:"abc", shipName: "Peter", factionId: "1"}) {
ship {
id
name
}
faction {
name
ships {
edges {
node {
id
name
}
}
def test_mutations(snapshot):
query = """
mutation MyMutation {
introduceShip(input:{clientMutationId:"abc", shipName: "Peter", factionId: "1"}) {
ship {
id
name
}
faction {
name
ships {
edges {
node {
id
name
}
}
}
}
""")
assert result == {
"data": {
"introduceShip": {
"ship": {"id": "U2hpcDo5", "name": "Peter"},
"faction": {
"name": "Alliance to Restore the Republic",
"ships": {
"edges": [
{"node": {"id": "U2hpcDox", "name": "X-Wing"}},
{"node": {"id": "U2hpcDoy", "name": "Y-Wing"}},
{"node": {"id": "U2hpcDoz", "name": "A-Wing"}},
{"node": {"id": "U2hpcDo0", "name": "Millennium Falcon"}},
{"node": {"id": "U2hpcDo1", "name": "Home One"}},
{"node": {"id": "U2hpcDo5", "name": "Peter"}},
]
},
},
}
}
}
}
"""
snapshot.assert_match(client.execute(query))


@ -1,5 +1,3 @@
import textwrap
from graphene.test import Client
from ..data import setup
@ -10,115 +8,24 @@ setup()
client = Client(schema)
def test_str_schema():
assert str(schema).strip() == textwrap.dedent(
'''\
type Query {
rebels: Faction
empire: Faction
node(
"""The ID of the object"""
id: ID!
): Node
}
"""A faction in the Star Wars saga"""
type Faction implements Node {
"""The ID of the object"""
id: ID!
"""The name of the faction."""
name: String
"""The ships used by the faction."""
ships(before: String, after: String, first: Int, last: Int): ShipConnection
}
"""An object with an ID"""
interface Node {
"""The ID of the object"""
id: ID!
}
type ShipConnection {
"""Pagination data for this connection."""
pageInfo: PageInfo!
"""Contains the nodes in this connection."""
edges: [ShipEdge]!
}
"""
The Relay compliant `PageInfo` type, containing data necessary to paginate this connection.
"""
type PageInfo {
"""When paginating forwards, are there more items?"""
hasNextPage: Boolean!
"""When paginating backwards, are there more items?"""
hasPreviousPage: Boolean!
"""When paginating backwards, the cursor to continue."""
startCursor: String
"""When paginating forwards, the cursor to continue."""
endCursor: String
}
"""A Relay edge containing a `Ship` and its cursor."""
type ShipEdge {
"""The item at the end of the edge"""
node: Ship
"""A cursor for use in pagination"""
cursor: String!
}
"""A ship in the Star Wars saga"""
type Ship implements Node {
"""The ID of the object"""
id: ID!
"""The name of the ship."""
name: String
}
type Mutation {
introduceShip(input: IntroduceShipInput!): IntroduceShipPayload
}
type IntroduceShipPayload {
ship: Ship
faction: Faction
clientMutationId: String
}
input IntroduceShipInput {
shipName: String!
factionId: String!
clientMutationId: String
}'''
)
def test_str_schema(snapshot):
snapshot.assert_match(str(schema).strip())
def test_correctly_fetches_id_name_rebels():
result = client.execute("""
def test_correctly_fetches_id_name_rebels(snapshot):
query = """
query RebelsQuery {
rebels {
id
name
}
}
""")
assert result == {
"data": {
"rebels": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}
}
}
"""
snapshot.assert_match(client.execute(query))
def test_correctly_refetches_rebels():
result = client.execute("""
def test_correctly_refetches_rebels(snapshot):
query = """
query RebelsRefetchQuery {
node(id: "RmFjdGlvbjox") {
id
@ -127,30 +34,24 @@ def test_correctly_refetches_rebels():
}
}
}
""")
assert result == {
"data": {
"node": {"id": "RmFjdGlvbjox", "name": "Alliance to Restore the Republic"}
}
}
"""
snapshot.assert_match(client.execute(query))
def test_correctly_fetches_id_name_empire():
result = client.execute("""
def test_correctly_fetches_id_name_empire(snapshot):
query = """
query EmpireQuery {
empire {
id
name
}
}
""")
assert result == {
"data": {"empire": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
}
"""
snapshot.assert_match(client.execute(query))
def test_correctly_refetches_empire():
result = client.execute("""
def test_correctly_refetches_empire(snapshot):
query = """
query EmpireRefetchQuery {
node(id: "RmFjdGlvbjoy") {
id
@ -159,14 +60,12 @@ def test_correctly_refetches_empire():
}
}
}
""")
assert result == {
"data": {"node": {"id": "RmFjdGlvbjoy", "name": "Galactic Empire"}}
}
"""
snapshot.assert_match(client.execute(query))
def test_correctly_refetches_xwing():
result = client.execute("""
def test_correctly_refetches_xwing(snapshot):
query = """
query XWingRefetchQuery {
node(id: "U2hpcDox") {
id
@ -175,5 +74,5 @@ def test_correctly_refetches_xwing():
}
}
}
""")
assert result == {"data": {"node": {"id": "U2hpcDox", "name": "X-Wing"}}}
"""
snapshot.assert_match(client.execute(query))


@ -46,7 +46,7 @@ from .types import (
from .utils.module_loading import lazy_import
from .utils.resolve_only_args import resolve_only_args
VERSION = (3, 4, 3, "final", 0)
VERSION = (3, 2, 2, "final", 0)
__version__ = get_version(VERSION)

File diff suppressed because it is too large.


@ -1,3 +1,5 @@
from __future__ import unicode_literals
import datetime
import os
import subprocess
@ -71,6 +73,6 @@ def get_git_changeset():
)
timestamp = git_log.communicate()[0]
timestamp = datetime.datetime.utcfromtimestamp(int(timestamp))
except Exception:
except:
return None
return timestamp.strftime("%Y%m%d%H%M%S")


@ -13,18 +13,12 @@ from .node import is_node, AbstractNode
def get_edge_class(
connection_class: Type["Connection"],
_node: Type[AbstractNode],
base_name: str,
strict_types: bool = False,
connection_class: Type["Connection"], _node: Type[AbstractNode], base_name: str
):
edge_class = getattr(connection_class, "Edge", None)
class EdgeBase:
node = Field(
NonNull(_node) if strict_types else _node,
description="The item at the end of the edge",
)
node = Field(_node, description="The item at the end of the edge")
cursor = String(required=True, description="A cursor for use in pagination")
class EdgeMeta:
@ -89,9 +83,7 @@ class Connection(ObjectType):
abstract = True
@classmethod
def __init_subclass_with_meta__(
cls, node=None, name=None, strict_types=False, _meta=None, **options
):
def __init_subclass_with_meta__(cls, node=None, name=None, _meta=None, **options):
if not _meta:
_meta = ConnectionOptions(cls)
assert node, f"You have to provide a node in {cls.__name__}.Meta"
@ -119,10 +111,10 @@ class Connection(ObjectType):
)
if "edges" not in _meta.fields:
edge_class = get_edge_class(cls, node, base_name, strict_types) # type: ignore
edge_class = get_edge_class(cls, node, base_name) # type: ignore
cls.Edge = edge_class
_meta.fields["edges"] = Field(
NonNull(List(NonNull(edge_class) if strict_types else edge_class)),
NonNull(List(edge_class)),
description="Contains the nodes in this connection.",
)
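
For context on the hunk above: the strict_types Meta option exists only on master and wraps both the edges list and the node field in NonNull. A minimal sketch of how it is declared, where Ship and ShipConnection are just assumed placeholder names:

import graphene
from graphene import relay

class Ship(graphene.ObjectType):
    # Assumed placeholder node type for this sketch.
    name = graphene.String()

class ShipConnection(relay.Connection):
    class Meta:
        node = Ship
        strict_types = True  # master-only option; edges and node become NonNull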


@ -11,7 +11,7 @@ class BaseGlobalIDType:
Base class that define the required attributes/method for a type.
"""
graphene_type: Type[BaseType] = ID
graphene_type = ID # type: Type[BaseType]
@classmethod
def resolve_global_id(cls, info, global_id):

View File

@ -299,20 +299,3 @@ def test_connectionfield_required():
executed = schema.execute("{ testConnection { edges { cursor } } }")
assert not executed.errors
assert executed.data == {"testConnection": {"edges": []}}
def test_connectionfield_strict_types():
class MyObjectConnection(Connection):
class Meta:
node = MyObject
strict_types = True
connection_field = ConnectionField(MyObjectConnection)
edges_field_type = connection_field.type._meta.fields["edges"].type
assert isinstance(edges_field_type, NonNull)
edges_list_element_type = edges_field_type.of_type.of_type
assert isinstance(edges_list_element_type, NonNull)
node_field = edges_list_element_type.of_type._meta.fields["node"]
assert isinstance(node_field.type, NonNull)


@ -9,7 +9,7 @@ from ...types import Int, ObjectType, Schema, String
class TestUUIDGlobalID:
def setup_method(self):
def setup(self):
self.user_list = [
{"id": uuid4(), "name": "First"},
{"id": uuid4(), "name": "Second"},
@ -77,7 +77,7 @@ class TestUUIDGlobalID:
class TestSimpleGlobalID:
def setup_method(self):
def setup(self):
self.user_list = [
{"id": "my global primary key in clear 1", "name": "First"},
{"id": "my global primary key in clear 2", "name": "Second"},
@ -140,7 +140,7 @@ class TestSimpleGlobalID:
class TestCustomGlobalID:
def setup_method(self):
def setup(self):
self.user_list = [
{"id": 1, "name": "First"},
{"id": 2, "name": "Second"},
@ -219,7 +219,7 @@ class TestCustomGlobalID:
class TestIncompleteCustomGlobalID:
def setup_method(self):
def setup(self):
self.user_list = [
{"id": 1, "name": "First"},
{"id": 2, "name": "Second"},


@ -8,6 +8,7 @@ from ..node import Node, is_node
class SharedNodeFields:
shared = String()
something_else = String()


@ -1,41 +0,0 @@
# https://github.com/graphql-python/graphene/issues/1293
from datetime import datetime, timezone
import graphene
from graphql.utilities import print_schema
class Filters(graphene.InputObjectType):
datetime_after = graphene.DateTime(
required=False,
default_value=datetime.fromtimestamp(1434549820.776, timezone.utc),
)
datetime_before = graphene.DateTime(
required=False,
default_value=datetime.fromtimestamp(1444549820.776, timezone.utc),
)
class SetDatetime(graphene.Mutation):
class Arguments:
filters = Filters(required=True)
ok = graphene.Boolean()
def mutate(root, info, filters):
return SetDatetime(ok=True)
class Query(graphene.ObjectType):
goodbye = graphene.String()
class Mutations(graphene.ObjectType):
set_datetime = SetDatetime.Field()
def test_schema_printable_with_default_datetime_value():
schema = graphene.Schema(query=Query, mutation=Mutations)
schema_str = print_schema(schema.graphql_schema)
assert schema_str, "empty schema printed"


@ -1,27 +0,0 @@
import pickle
from ...types.enum import Enum
class PickleEnum(Enum):
# defined outside the test because pickle is unable to dump a class defined inside a pytest function
A = "a"
B = 1
def test_enums_pickling():
a = PickleEnum.A
pickled = pickle.dumps(a)
restored = pickle.loads(pickled)
assert type(a) is type(restored)
assert a == restored
assert a.value == restored.value
assert a.name == restored.name
b = PickleEnum.B
pickled = pickle.dumps(b)
restored = pickle.loads(pickled)
assert type(a) is type(restored)
assert b == restored
assert b.value == restored.value
assert b.name == restored.name


@ -1,3 +1,4 @@
# flake8: noqa
from graphql import GraphQLResolveInfo as ResolveInfo
from .argument import Argument


@ -1,17 +1,17 @@
from typing import Type, Optional
from typing import Type
from ..utils.subclass_with_meta import SubclassWithMeta, SubclassWithMeta_Meta
from ..utils.trim_docstring import trim_docstring
class BaseOptions:
name: Optional[str] = None
description: Optional[str] = None
name = None # type: str
description = None # type: str
_frozen: bool = False
_frozen = False # type: bool
def __init__(self, class_type: Type):
self.class_type: Type = class_type
def __init__(self, class_type):
self.class_type = class_type # type: Type
def freeze(self):
self._frozen = True


@ -1,7 +1,8 @@
from __future__ import absolute_import
import datetime
from dateutil.parser import isoparse
from aniso8601 import parse_date, parse_datetime, parse_time
from graphql.error import GraphQLError
from graphql.language import StringValueNode, print_ast
@ -38,7 +39,7 @@ class Date(Scalar):
if not isinstance(value, str):
raise GraphQLError(f"Date cannot represent non-string value: {repr(value)}")
try:
return datetime.date.fromisoformat(value)
return parse_date(value)
except ValueError:
raise GraphQLError(f"Date cannot represent value: {repr(value)}")
@ -73,7 +74,7 @@ class DateTime(Scalar):
f"DateTime cannot represent non-string value: {repr(value)}"
)
try:
return isoparse(value)
return parse_datetime(value)
except ValueError:
raise GraphQLError(f"DateTime cannot represent value: {repr(value)}")
@ -106,6 +107,6 @@ class Time(Scalar):
if not isinstance(value, str):
raise GraphQLError(f"Time cannot represent non-string value: {repr(value)}")
try:
return datetime.time.fromisoformat(value)
return parse_time(value)
except ValueError:
raise GraphQLError(f"Time cannot represent value: {repr(value)}")


@ -1,3 +1,5 @@
from __future__ import absolute_import
from decimal import Decimal as _Decimal
from graphql import Undefined


@ -31,11 +31,9 @@ class EnumMeta(SubclassWithMeta_Meta):
# with the enum values.
enum_members.pop("Meta", None)
enum = PyEnum(cls.__name__, enum_members)
obj = SubclassWithMeta_Meta.__new__(
return SubclassWithMeta_Meta.__new__(
cls, name_, bases, dict(classdict, __enum__=enum), **options
)
globals()[name_] = obj.__enum__
return obj
def get(cls, value):
return cls._meta.enum(value)
@ -61,9 +59,11 @@ class EnumMeta(SubclassWithMeta_Meta):
def __iter__(cls):
return cls._meta.enum.__iter__()
def from_enum(cls, enum, name=None, description=None, deprecation_reason=None): # noqa: N805
def from_enum(
cls, enum, name=None, description=None, deprecation_reason=None
): # noqa: N805
name = name or enum.__name__
description = description or enum.__doc__ or "An enumeration."
description = description or enum.__doc__
meta_dict = {
"enum": enum,
"description": description,


@ -43,8 +43,7 @@ class Field(MountedType):
args:
type (class for a graphene.UnmountedType): Must be a class (not an instance) of an
unmounted graphene type (ex. scalar or object) which is used for the type of this
field in the GraphQL schema. You can provide a dotted module import path (string)
to the class instead of the class itself (e.g. to avoid circular import issues).
field in the GraphQL schema.
args (optional, Dict[str, graphene.Argument]): Arguments that can be input to the field.
Prefer to use ``**extra_args``, unless you use an argument name that clashes with one
of the Field arguments presented here (see :ref:`example<ResolverParamGraphQLArguments>`).
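
The docstring line added on master above documents lazy type references. A hedged sketch of what that looks like; the dotted path "myapp.schema.PersonType" is a made-up placeholder, and the Query class is purely illustrative:

from graphene import Field, ObjectType, String

class Query(ObjectType):
    # A dotted import path string can be passed instead of the class itself,
    # which helps avoid circular imports; it is resolved lazily when the
    # schema is built. The module path below is hypothetical.
    person = Field("myapp.schema.PersonType")
    hello = String()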


@ -1,3 +1,5 @@
from __future__ import unicode_literals
from graphql.language.ast import (
BooleanValueNode,
FloatValueNode,


@ -1,12 +1,11 @@
from typing import TYPE_CHECKING
from .base import BaseOptions, BaseType
from .inputfield import InputField
from .unmountedtype import UnmountedType
from .utils import yank_fields_from_attrs
# For static type checking with type checker
if TYPE_CHECKING:
# For static type checking with Mypy
MYPY = False
if MYPY:
from typing import Dict, Callable # NOQA
@ -15,31 +14,6 @@ class InputObjectTypeOptions(BaseOptions):
container = None # type: InputObjectTypeContainer
# Currently in Graphene, we get a `None` whenever we access an (optional) field that was not set in an InputObjectType
# using the InputObjectType.<attribute> dot access syntax. This is ambiguous, because in this current (Graphene
# historical) arrangement, we cannot distinguish between a field not being set and a field being set to None.
# At the same time, we shouldn't break existing code that expects a `None` when accessing a field that was not set.
_INPUT_OBJECT_TYPE_DEFAULT_VALUE = None
# To mitigate this, we provide the function `set_input_object_type_default_value` to allow users to change the default
# value returned in non-specified fields in InputObjectType to another meaningful sentinel value (e.g. Undefined)
# if they want to. This way, we can keep code that expects a `None` working while we figure out a better solution (or
# a well-documented breaking change) for this issue.
def set_input_object_type_default_value(default_value):
"""
Change the sentinel value returned by non-specified fields in an InputObjectType
Useful to differentiate between a field not being set and a field being set to None by using a sentinel value
(e.g. Undefined is a good sentinel value for this purpose)
This function should be called at the beginning of the app or in some other place where it is guaranteed to
be called before any InputObjectType is defined.
"""
global _INPUT_OBJECT_TYPE_DEFAULT_VALUE
_INPUT_OBJECT_TYPE_DEFAULT_VALUE = default_value
class InputObjectTypeContainer(dict, BaseType): # type: ignore
class Meta:
abstract = True
@ -47,7 +21,7 @@ class InputObjectTypeContainer(dict, BaseType): # type: ignore
def __init__(self, *args, **kwargs):
dict.__init__(self, *args, **kwargs)
for key in self._meta.fields:
setattr(self, key, self.get(key, _INPUT_OBJECT_TYPE_DEFAULT_VALUE))
setattr(self, key, self.get(key, None))
def __init_subclass__(cls, *args, **kwargs):
pass
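
For readers of the hunk above: set_input_object_type_default_value exists only on master, and the conftest fixture later in this diff shows the intended call pattern. A minimal sketch of that usage:

from graphql import Undefined
from graphene.types.inputobjecttype import set_input_object_type_default_value

# Call once at application start-up, before any InputObjectType is defined,
# so that unset optional input fields come back as Undefined instead of None.
set_input_object_type_default_value(Undefined)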


@ -1,11 +1,10 @@
from typing import TYPE_CHECKING
from .base import BaseOptions, BaseType
from .field import Field
from .utils import yank_fields_from_attrs
# For static type checking with type checker
if TYPE_CHECKING:
# For static type checking with Mypy
MYPY = False
if MYPY:
from typing import Dict, Iterable, Type # NOQA


@ -1,3 +1,5 @@
from __future__ import absolute_import
import json
from graphql import Undefined


@ -1,5 +1,3 @@
from typing import TYPE_CHECKING
from ..utils.deprecated import warn_deprecation
from ..utils.get_unbound_function import get_unbound_function
from ..utils.props import props
@ -8,8 +6,9 @@ from .objecttype import ObjectType, ObjectTypeOptions
from .utils import yank_fields_from_attrs
from .interface import Interface
# For static type checking with type checker
if TYPE_CHECKING:
# For static type checking with Mypy
MYPY = False
if MYPY:
from .argument import Argument # NOQA
from typing import Dict, Type, Callable, Iterable # NOQA


@ -1,14 +1,15 @@
from typing import TYPE_CHECKING
from .base import BaseOptions, BaseType, BaseTypeMeta
from .field import Field
from .interface import Interface
from .utils import yank_fields_from_attrs
from dataclasses import make_dataclass, field
# For static type checking with type checker
if TYPE_CHECKING:
try:
from dataclasses import make_dataclass, field
except ImportError:
from ..pyutils.dataclasses import make_dataclass, field # type: ignore
# For static type checking with Mypy
MYPY = False
if MYPY:
from typing import Dict, Iterable, Type # NOQA


@ -121,7 +121,8 @@ class Float(Scalar):
"""
@staticmethod
def coerce_float(value: Any) -> float:
def coerce_float(value):
# type: (Any) -> float
try:
return float(value)
except ValueError:


@ -1,12 +0,0 @@
import pytest
from graphql import Undefined
from graphene.types.inputobjecttype import set_input_object_type_default_value
@pytest.fixture()
def set_default_input_object_type_to_undefined():
"""This fixture is used to change the default value of optional inputs in InputObjectTypes for specific tests"""
set_input_object_type_default_value(Undefined)
yield
set_input_object_type_default_value(None)


@ -1,5 +1,6 @@
import datetime
import pytz
from graphql import GraphQLError
from pytest import fixture
@ -29,7 +30,7 @@ schema = Schema(query=Query)
@fixture
def sample_datetime():
utc_datetime = datetime.datetime(2019, 5, 25, 5, 30, 15, 10, datetime.timezone.utc)
utc_datetime = datetime.datetime(2019, 5, 25, 5, 30, 15, 10, pytz.utc)
return utc_datetime
@ -227,18 +228,6 @@ def test_time_query_variable(sample_time):
assert result.data == {"time": isoformat}
def test_support_isoformat():
isoformat = "2011-11-04T00:05:23Z"
# test time variable provided as Python time
result = schema.execute(
"""query DateTime($time: DateTime){ datetime(in: $time) }""",
variables={"time": isoformat},
)
assert not result.errors
assert result.data == {"datetime": "2011-11-04T00:05:23+00:00"}
def test_bad_variables(sample_date, sample_datetime, sample_time):
def _test_bad_variables(type_, input_):
result = schema.execute(


@ -65,21 +65,6 @@ def test_enum_from_builtin_enum():
assert RGB.BLUE
def test_enum_custom_description_in_constructor():
description = "An enumeration, but with a custom description"
RGB = Enum(
"RGB",
"RED,GREEN,BLUE",
description=description,
)
assert RGB._meta.description == description
def test_enum_from_python3_enum_uses_default_builtin_doc():
RGB = Enum("RGB", "RED,GREEN,BLUE")
assert RGB._meta.description == "An enumeration."
def test_enum_from_builtin_enum_accepts_lambda_description():
def custom_description(value):
if not value:


@ -1,5 +1,3 @@
from graphql import Undefined
from ..argument import Argument
from ..field import Field
from ..inputfield import InputField
@ -8,7 +6,6 @@ from ..objecttype import ObjectType
from ..scalars import Boolean, String
from ..schema import Schema
from ..unmountedtype import UnmountedType
from ... import NonNull
class MyType:
@ -139,31 +136,3 @@ def test_inputobjecttype_of_input():
assert not result.errors
assert result.data == {"isChild": True}
def test_inputobjecttype_default_input_as_undefined(
set_default_input_object_type_to_undefined,
):
class TestUndefinedInput(InputObjectType):
required_field = String(required=True)
optional_field = String()
class Query(ObjectType):
undefined_optionals_work = Field(NonNull(Boolean), input=TestUndefinedInput())
def resolve_undefined_optionals_work(self, info, input: TestUndefinedInput):
# Confirm that optional_field comes as Undefined
return (
input.required_field == "required" and input.optional_field is Undefined
)
schema = Schema(query=Query)
result = schema.execute(
"""query basequery {
undefinedOptionalsWork(input: {requiredField: "required"})
}
"""
)
assert not result.errors
assert result.data == {"undefinedOptionalsWork": True}


@ -51,30 +51,35 @@ def test_jsonstring_invalid_query():
Test that if an invalid type is provided we get an error
"""
result = schema.execute("{ json(input: 1) }")
assert result.errors == [
{"message": "Expected value of type 'JSONString', found 1."},
]
assert result.errors
assert len(result.errors) == 1
assert result.errors[0].message == "Expected value of type 'JSONString', found 1."
result = schema.execute("{ json(input: {}) }")
assert result.errors == [
{"message": "Expected value of type 'JSONString', found {}."},
]
assert result.errors
assert len(result.errors) == 1
assert result.errors[0].message == "Expected value of type 'JSONString', found {}."
result = schema.execute('{ json(input: "a") }')
assert result.errors == [
{
"message": "Expected value of type 'JSONString', found \"a\"; "
"Badly formed JSONString: Expecting value: line 1 column 1 (char 0)",
},
]
assert result.errors
assert len(result.errors) == 1
assert result.errors[0].message == (
"Expected value of type 'JSONString', found \"a\"; "
"Badly formed JSONString: Expecting value: line 1 column 1 (char 0)"
)
result = schema.execute("""{ json(input: "{\\'key\\': 0}") }""")
assert result.errors == [
{"message": "Syntax Error: Invalid character escape sequence: '\\''."},
]
assert result.errors
assert len(result.errors) == 1
assert (
result.errors[0].message
== "Syntax Error: Invalid character escape sequence: '\\''."
)
result = schema.execute("""{ json(input: "{\\"key\\": 0,}") }""")
assert result.errors
assert len(result.errors) == 1
assert result.errors[0].message.startswith(
'Expected value of type \'JSONString\', found "{\\"key\\": 0,}"; Badly formed JSONString:'
assert result.errors[0].message == (
'Expected value of type \'JSONString\', found "{\\"key\\": 0,}"; '
"Badly formed JSONString: Expecting property name enclosed in double quotes: line 1 column 11 (char 10)"
)


@ -39,7 +39,7 @@ def test_serializes_output_string():
assert String.serialize(-1.1) == "-1.1"
assert String.serialize(True) == "true"
assert String.serialize(False) == "false"
assert String.serialize("\U0001f601") == "\U0001f601"
assert String.serialize("\U0001F601") == "\U0001F601"
def test_serializes_output_boolean():


@ -20,8 +20,8 @@ from ..inputobjecttype import InputObjectType
from ..interface import Interface
from ..objecttype import ObjectType
from ..scalars import Int, String
from ..schema import Schema
from ..structures import List, NonNull
from ..schema import Schema
def create_type_map(types, auto_camelcase=True):
@ -227,18 +227,6 @@ def test_inputobject():
assert foo_field.description == "Field description"
def test_inputobject_undefined(set_default_input_object_type_to_undefined):
class OtherObjectType(InputObjectType):
optional_field = String()
type_map = create_type_map([OtherObjectType])
assert "OtherObjectType" in type_map
graphql_type = type_map["OtherObjectType"]
container = graphql_type.out_type({})
assert container.optional_field is Undefined
def test_objecttype_camelcase():
class MyObjectType(ObjectType):
"""Description"""


@ -36,21 +36,6 @@ def test_uuidstring_query_variable():
assert result.data == {"uuid": uuid_value}
def test_uuidstring_invalid_argument():
uuid_value = {"not": "a string"}
result = schema.execute(
"""query Test($uuid: UUID){ uuid(input: $uuid) }""",
variables={"uuid": uuid_value},
)
assert result.errors
assert len(result.errors) == 1
assert (
result.errors[0].message
== "Variable '$uuid' got invalid value {'not': 'a string'}; UUID cannot represent value: {'not': 'a string'}"
)
def test_uuidstring_optional_uuid_input():
"""
Test that we can provide a null value to an optional input


@ -1,10 +1,9 @@
from typing import TYPE_CHECKING
from .base import BaseOptions, BaseType
from .unmountedtype import UnmountedType
# For static type checking with type checker
if TYPE_CHECKING:
# For static type checking with Mypy
MYPY = False
if MYPY:
from .objecttype import ObjectType # NOQA
from typing import Iterable, Type # NOQA
@ -51,14 +50,12 @@ class Union(UnmountedType, BaseType):
"""
@classmethod
def __init_subclass_with_meta__(cls, types=None, _meta=None, **options):
def __init_subclass_with_meta__(cls, types=None, **options):
assert (
isinstance(types, (list, tuple)) and len(types) > 0
), f"Must provide types for Union {cls.__name__}."
if not _meta:
_meta = UnionOptions(cls)
_meta = UnionOptions(cls)
_meta.types = types
super(Union, cls).__init_subclass_with_meta__(_meta=_meta, **options)


@ -1,6 +1,6 @@
from __future__ import absolute_import
from uuid import UUID as _UUID
from graphql.error import GraphQLError
from graphql.language.ast import StringValueNode
from graphql import Undefined
@ -29,9 +29,4 @@ class UUID(Scalar):
@staticmethod
def parse_value(value):
if isinstance(value, _UUID):
return value
try:
return _UUID(value)
except (ValueError, AttributeError):
raise GraphQLError(f"UUID cannot represent value: {repr(value)}")
return _UUID(value)


@ -9,7 +9,7 @@ from collections import namedtuple
from collections.abc import Iterable
from functools import partial
from typing import List
from typing import List # flake8: noqa
Loader = namedtuple("Loader", "key,future")
@ -33,6 +33,7 @@ class DataLoader(object):
cache_map=None,
loop=None,
):
self._loop = loop
if batch_load_fn is not None:
@ -62,7 +63,7 @@ class DataLoader(object):
self.get_cache_key = get_cache_key or (lambda x: x)
self._cache = cache_map if cache_map is not None else {}
self._queue: List[Loader] = []
self._queue = [] # type: List[Loader]
@property
def loop(self):


@ -1,5 +1,70 @@
from warnings import warn
import functools
import inspect
import warnings
string_types = (type(b""), type(""))
def warn_deprecation(text: str):
warn(text, category=DeprecationWarning, stacklevel=2)
def warn_deprecation(text):
warnings.warn(text, category=DeprecationWarning, stacklevel=2)
def deprecated(reason):
"""
This is a decorator which can be used to mark functions
as deprecated. It will result in a warning being emitted
when the function is used.
"""
if isinstance(reason, string_types):
# The @deprecated is used with a 'reason'.
#
# .. code-block:: python
#
# @deprecated("please, use another function")
# def old_function(x, y):
# pass
def decorator(func1):
if inspect.isclass(func1):
fmt1 = f"Call to deprecated class {func1.__name__} ({reason})."
else:
fmt1 = f"Call to deprecated function {func1.__name__} ({reason})."
@functools.wraps(func1)
def new_func1(*args, **kwargs):
warn_deprecation(fmt1)
return func1(*args, **kwargs)
return new_func1
return decorator
elif inspect.isclass(reason) or inspect.isfunction(reason):
# The @deprecated is used without any 'reason'.
#
# .. code-block:: python
#
# @deprecated
# def old_function(x, y):
# pass
func2 = reason
if inspect.isclass(func2):
fmt2 = f"Call to deprecated class {func2.__name__}."
else:
fmt2 = f"Call to deprecated function {func2.__name__}."
@functools.wraps(func2)
def new_func2(*args, **kwargs):
warn_deprecation(fmt2)
return func2(*args, **kwargs)
return new_func2
else:
raise TypeError(repr(type(reason)))


@ -1,5 +1,6 @@
from functools import wraps
from typing_extensions import deprecated
from .deprecated import deprecated
@deprecated("This function is deprecated")


@ -1,9 +1,75 @@
from pytest import raises
from .. import deprecated
from ..deprecated import deprecated as deprecated_decorator
from ..deprecated import warn_deprecation
def test_warn_deprecation(mocker):
mocker.patch.object(deprecated, "warn")
mocker.patch.object(deprecated.warnings, "warn")
warn_deprecation("OH!")
deprecated.warn.assert_called_with("OH!", stacklevel=2, category=DeprecationWarning)
deprecated.warnings.warn.assert_called_with(
"OH!", stacklevel=2, category=DeprecationWarning
)
def test_deprecated_decorator(mocker):
mocker.patch.object(deprecated, "warn_deprecation")
@deprecated_decorator
def my_func():
return True
result = my_func()
assert result
deprecated.warn_deprecation.assert_called_with(
"Call to deprecated function my_func."
)
def test_deprecated_class(mocker):
mocker.patch.object(deprecated, "warn_deprecation")
@deprecated_decorator
class X:
pass
result = X()
assert result
deprecated.warn_deprecation.assert_called_with("Call to deprecated class X.")
def test_deprecated_decorator_text(mocker):
mocker.patch.object(deprecated, "warn_deprecation")
@deprecated_decorator("Deprecation text")
def my_func():
return True
result = my_func()
assert result
deprecated.warn_deprecation.assert_called_with(
"Call to deprecated function my_func (Deprecation text)."
)
def test_deprecated_class_text(mocker):
mocker.patch.object(deprecated, "warn_deprecation")
@deprecated_decorator("Deprecation text")
class X:
pass
result = X()
assert result
deprecated.warn_deprecation.assert_called_with(
"Call to deprecated class X (Deprecation text)."
)
def test_deprecated_other_object(mocker):
mocker.patch.object(deprecated, "warn_deprecation")
with raises(TypeError):
deprecated_decorator({})


@ -9,5 +9,6 @@ def test_resolve_only_args(mocker):
return root, args
wrapped_resolver = resolve_only_args(resolver)
assert deprecated.warn_deprecation.called
result = wrapped_resolver(1, 2, a=3)
assert result == (1, {"a": 3})


@ -30,7 +30,7 @@ try:
except ImportError:
# backwards compatibility for v3.6
from typing import Pattern
from typing import Callable, Dict, List, Optional, Union, Tuple
from typing import Callable, Dict, List, Optional, Union
from graphql import GraphQLError
from graphql.validation import ValidationContext, ValidationRule
@ -82,7 +82,7 @@ def depth_limit_validator(
def get_fragments(
definitions: Tuple[DefinitionNode, ...],
definitions: List[DefinitionNode],
) -> Dict[str, FragmentDefinitionNode]:
fragments = {}
for definition in definitions:
@ -94,7 +94,7 @@ def get_fragments(
# This will actually get both queries and mutations.
# We can basically treat those the same
def get_queries_and_mutations(
definitions: Tuple[DefinitionNode, ...],
definitions: List[DefinitionNode],
) -> Dict[str, OperationDefinitionNode]:
operations = {}


@ -1,5 +1,16 @@
[flake8]
exclude = setup.py,docs/*,*/examples/*,graphene/pyutils/*,tests
max-line-length = 120
# This is a specific ignore for Black+Flake8
# source: https://black.readthedocs.io/en/stable/guides/using_black_with_other_tools.html#id1
extend-ignore = E203
[coverage:run]
omit = graphene/pyutils/*,*/tests/*,graphene/types/scalars.py
[isort]
known_first_party=graphene
[bdist_wheel]
universal=1


@ -45,50 +45,47 @@ class PyTest(TestCommand):
tests_require = [
"pytest>=8,<9",
"pytest-benchmark>=4,<5",
"pytest-cov>=5,<6",
"pytest>=6,<7",
"pytest-benchmark>=3.4,<4",
"pytest-cov>=3,<4",
"pytest-mock>=3,<4",
"pytest-asyncio>=0.16,<2",
"coveralls>=3.3,<5",
"snapshottest>=0.6,<1",
"coveralls>=3.3,<4",
"mock>=4,<5",
"pytz==2022.1",
"iso8601>=1,<2",
]
dev_requires = [
"ruff==0.5.0",
"types-python-dateutil>=2.8.1,<3",
"mypy>=1.10,<2",
] + tests_require
dev_requires = ["black==22.3.0", "flake8>=4,<5"] + tests_require
setup(
name="graphene",
version=version,
description="GraphQL Framework for Python",
long_description=codecs.open(
"README.md", "r", encoding="ascii", errors="replace"
"README.rst", "r", encoding="ascii", errors="replace"
).read(),
long_description_content_type="text/markdown",
url="https://github.com/graphql-python/graphene",
author="Syrus Akbary",
author_email="me@syrusakbary.com",
license="MIT",
classifiers=[
"Development Status :: 5 - Production/Stable",
"Development Status :: 3 - Alpha",
"Intended Audience :: Developers",
"Topic :: Software Development :: Libraries",
"Programming Language :: Python :: 3.6",
"Programming Language :: Python :: 3.7",
"Programming Language :: Python :: 3.8",
"Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: 3.13",
],
keywords="api graphql protocol rest relay graphene",
packages=find_packages(exclude=["examples*"]),
install_requires=[
"graphql-core>=3.1,<3.3",
"graphql-relay>=3.1,<3.3",
"python-dateutil>=2.7.0,<3",
"typing-extensions>=4.7.1,<5",
"aniso8601>=8,<10",
],
tests_require=tests_require,
extras_require={"test": tests_require, "dev": dev_requires},

tox.ini: 10 lines changed

@ -1,17 +1,19 @@
[tox]
envlist = py3{8,9,10,11,12,13}, mypy, pre-commit
envlist = py3{6,7,8,9,10}, mypy, pre-commit
skipsdist = true
[testenv]
deps =
.[test]
setenv =
PYTHONPATH = .:{envdir}
commands =
pytest --cov=graphene graphene --cov-report=term --cov-report=xml examples {posargs}
py{36,37,38,39,310}: pytest --cov=graphene graphene --cov-report=term --cov-report=xml examples {posargs}
[testenv:pre-commit]
basepython = python3.10
deps =
pre-commit>=3.7,<4
pre-commit>=2.16,<3
setenv =
LC_CTYPE=en_US.UTF-8
commands =
@ -20,7 +22,7 @@ commands =
[testenv:mypy]
basepython = python3.10
deps =
.[dev]
mypy>=0.950,<1
commands =
mypy graphene