Merge branch 'main' into setting-convert-choices-to-enum

Firas Kafri 2023-12-14 11:21:11 +03:00 committed by GitHub
commit d947a75d1e
No known key found for this signature in database
GPG Key ID: 4AEE18F83AFDEB23
38 changed files with 641 additions and 188 deletions


@ -6,8 +6,13 @@ on:
- 'v*' - 'v*'
jobs: jobs:
build: lint:
uses: ./.github/workflows/lint.yml
tests:
uses: ./.github/workflows/tests.yml
release:
runs-on: ubuntu-latest runs-on: ubuntu-latest
needs: [lint, tests]
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3


@ -4,6 +4,7 @@ on:
push: push:
branches: ["main"] branches: ["main"]
pull_request: pull_request:
workflow_call:
jobs: jobs:
build: build:


@ -4,6 +4,7 @@ on:
push: push:
branches: ["main"] branches: ["main"]
pull_request: pull_request:
workflow_call:
jobs: jobs:
build: build:
@ -11,13 +12,17 @@ jobs:
strategy: strategy:
max-parallel: 4 max-parallel: 4
matrix: matrix:
django: ["3.2", "4.1", "4.2"] django: ["3.2", "4.2", "5.0"]
python-version: ["3.8", "3.9", "3.10"] python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
include: exclude:
- django: "4.1" - django: "3.2"
python-version: "3.11"
- django: "4.2"
python-version: "3.11" python-version: "3.11"
- django: "3.2"
python-version: "3.12"
- django: "5.0"
python-version: "3.8"
- django: "5.0"
python-version: "3.9"
steps: steps:
- uses: actions/checkout@v3 - uses: actions/checkout@v3
- name: Set up Python ${{ matrix.python-version }} - name: Set up Python ${{ matrix.python-version }}


@ -2,7 +2,7 @@ default_language_version:
python: python3.11 python: python3.11
repos: repos:
- repo: https://github.com/pre-commit/pre-commit-hooks - repo: https://github.com/pre-commit/pre-commit-hooks
rev: v4.4.0 rev: v4.5.0
hooks: hooks:
- id: check-merge-conflict - id: check-merge-conflict
- id: check-json - id: check-json
@ -15,12 +15,9 @@ repos:
- --autofix - --autofix
- id: trailing-whitespace - id: trailing-whitespace
exclude: README.md exclude: README.md
- repo: https://github.com/psf/black
rev: 23.7.0
hooks:
- id: black
- repo: https://github.com/astral-sh/ruff-pre-commit - repo: https://github.com/astral-sh/ruff-pre-commit
rev: v0.0.283 rev: v0.1.2
hooks: hooks:
- id: ruff - id: ruff
args: [--fix, --exit-non-zero-on-fix, --show-fixes] args: [--fix, --exit-non-zero-on-fix, --show-fixes]
- id: ruff-format


@ -13,6 +13,7 @@ ignore = [
"B017", # pytest.raises(Exception) should be considered evil "B017", # pytest.raises(Exception) should be considered evil
"B028", # warnings.warn called without an explicit stacklevel keyword argument "B028", # warnings.warn called without an explicit stacklevel keyword argument
"B904", # check for raise statements in exception handlers that lack a from clause "B904", # check for raise statements in exception handlers that lack a from clause
"W191", # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
] ]
exclude = [ exclude = [
@ -29,5 +30,4 @@ target-version = "py38"
[isort] [isort]
known-first-party = ["graphene", "graphene-django"] known-first-party = ["graphene", "graphene-django"]
known-local-folder = ["cookbook"] known-local-folder = ["cookbook"]
force-wrap-aliases = true
combine-as-imports = true combine-as-imports = true


@ -14,7 +14,7 @@ tests:
.PHONY: format ## Format code .PHONY: format ## Format code
format: format:
black graphene_django examples setup.py ruff format graphene_django examples setup.py
.PHONY: lint ## Lint code .PHONY: lint ## Lint code
lint: lint:


@ -6,7 +6,7 @@ Graphene-Django can be customised using settings. This page explains each settin
Usage Usage
----- -----
Add settings to your Django project by creating a Dictonary with name ``GRAPHENE`` in the project's ``settings.py``: Add settings to your Django project by creating a Dictionary with name ``GRAPHENE`` in the project's ``settings.py``:
.. code:: python .. code:: python
@ -206,9 +206,6 @@ Set to ``False`` if you want to disable GraphiQL headers editor tab for some rea
This setting is passed to ``headerEditorEnabled`` GraphiQL options, for details refer to GraphiQLDocs_. This setting is passed to ``headerEditorEnabled`` GraphiQL options, for details refer to GraphiQLDocs_.
.. _GraphiQLDocs: https://github.com/graphql/graphiql/tree/main/packages/graphiql#options
Default: ``True`` Default: ``True``
.. code:: python .. code:: python
@ -239,8 +236,6 @@ Set to ``True`` if you want to persist GraphiQL headers after refreshing the pag
This setting is passed to ``shouldPersistHeaders`` GraphiQL options, for details refer to GraphiQLDocs_. This setting is passed to ``shouldPersistHeaders`` GraphiQL options, for details refer to GraphiQLDocs_.
.. _GraphiQLDocs: https://github.com/graphql/graphiql/tree/main/packages/graphiql#options
Default: ``False`` Default: ``False``
@ -249,3 +244,37 @@ Default: ``False``
GRAPHENE = { GRAPHENE = {
'GRAPHIQL_SHOULD_PERSIST_HEADERS': False, 'GRAPHIQL_SHOULD_PERSIST_HEADERS': False,
} }
``GRAPHIQL_INPUT_VALUE_DEPRECATION``
------------------------------------
Set to ``True`` if you want GraphiQL to show any deprecated fields on input object types' docs.
For example, having this schema:
.. code:: python
class MyMutationInputType(graphene.InputObjectType):
old_field = graphene.String(deprecation_reason="You should now use 'newField' instead.")
new_field = graphene.String()
class MyMutation(graphene.Mutation):
class Arguments:
input = types.MyMutationInputType()
GraphiQL will add a ``Show Deprecated Fields`` button to toggle information display on ``oldField`` and its deprecation
reason. Otherwise, you would get neither a button nor any information at all on ``oldField``.
This setting is passed to ``inputValueDeprecation`` GraphiQL options, for details refer to GraphiQLDocs_.
Default: ``False``
.. code:: python
GRAPHENE = {
'GRAPHIQL_INPUT_VALUE_DEPRECATION': False,
}
.. _GraphiQLDocs: https://graphiql-test.netlify.app/typedoc/modules/graphiql_react#graphiqlprovider-2


@ -231,7 +231,7 @@
"fields": { "fields": {
"category": 3, "category": 3,
"name": "Newt", "name": "Newt",
"notes": "Braised and Confuesd" "notes": "Braised and Confused"
}, },
"model": "ingredients.ingredient", "model": "ingredients.ingredient",
"pk": 5 "pk": 5


@ -28,7 +28,7 @@ def initialize():
# Yeah, technically it's Corellian. But it flew in the service of the rebels, # Yeah, technically it's Corellian. But it flew in the service of the rebels,
# so for the purposes of this demo it's a rebel ship. # so for the purposes of this demo it's a rebel ship.
falcon = Ship(id="4", name="Millenium Falcon", faction=rebels) falcon = Ship(id="4", name="Millennium Falcon", faction=rebels)
falcon.save() falcon.save()
homeOne = Ship(id="5", name="Home One", faction=rebels) homeOne = Ship(id="5", name="Home One", faction=rebels)


@ -40,7 +40,7 @@ def test_mutations():
{"node": {"id": "U2hpcDox", "name": "X-Wing"}}, {"node": {"id": "U2hpcDox", "name": "X-Wing"}},
{"node": {"id": "U2hpcDoy", "name": "Y-Wing"}}, {"node": {"id": "U2hpcDoy", "name": "Y-Wing"}},
{"node": {"id": "U2hpcDoz", "name": "A-Wing"}}, {"node": {"id": "U2hpcDoz", "name": "A-Wing"}},
{"node": {"id": "U2hpcDo0", "name": "Millenium Falcon"}}, {"node": {"id": "U2hpcDo0", "name": "Millennium Falcon"}},
{"node": {"id": "U2hpcDo1", "name": "Home One"}}, {"node": {"id": "U2hpcDo1", "name": "Home One"}},
{"node": {"id": "U2hpcDo5", "name": "Peter"}}, {"node": {"id": "U2hpcDo5", "name": "Peter"}},
] ]


@ -1,3 +1,6 @@
import sys
from pathlib import PurePath
# For backwards compatibility, we import JSONField to have it available for import via # For backwards compatibility, we import JSONField to have it available for import via
# this compat module (https://github.com/graphql-python/graphene-django/issues/1428). # this compat module (https://github.com/graphql-python/graphene-django/issues/1428).
# Django's JSONField is available in Django 3.2+ (the minimum version we support) # Django's JSONField is available in Django 3.2+ (the minimum version we support)
@ -19,4 +22,23 @@ try:
RangeField, RangeField,
) )
except ImportError: except ImportError:
IntegerRangeField, ArrayField, HStoreField, RangeField = (MissingType,) * 4 IntegerRangeField, HStoreField, RangeField = (MissingType,) * 3
# For unit tests we fake ArrayField using JSONFields
if any(
PurePath(sys.argv[0]).match(p)
for p in [
"**/pytest",
"**/py.test",
"**/pytest/__main__.py",
]
):
class ArrayField(JSONField):
def __init__(self, *args, **kwargs):
if len(args) > 0:
self.base_field = args[0]
super().__init__(**kwargs)
else:
ArrayField = MissingType
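
(Aside: a standalone sketch of the pytest-detection check added above — purely illustrative, the helper name is not part of the change — showing how sys.argv[0] is matched against the usual pytest entry points.)

import sys
from pathlib import PurePath

def _running_under_pytest() -> bool:
    # sys.argv[0] is the script that launched the process; under pytest it is
    # typically .../bin/pytest, .../bin/py.test or .../pytest/__main__.py.
    return any(
        PurePath(sys.argv[0]).match(pattern)
        for pattern in ("**/pytest", "**/py.test", "**/pytest/__main__.py")
    )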


@ -194,7 +194,7 @@ class DjangoConnectionField(ConnectionField):
enforce_first_or_last, enforce_first_or_last,
root, root,
info, info,
**args **args,
): ):
first = args.get("first") first = args.get("first")
last = args.get("last") last = args.get("last")


@ -36,7 +36,7 @@ class DjangoFilterConnectionField(DjangoConnectionField):
extra_filter_meta=None, extra_filter_meta=None,
filterset_class=None, filterset_class=None,
*args, *args,
**kwargs **kwargs,
): ):
self._fields = fields self._fields = fields
self._provided_filterset_class = filterset_class self._provided_filterset_class = filterset_class


@ -1,13 +1,36 @@
from django_filters.constants import EMPTY_VALUES from django_filters.constants import EMPTY_VALUES
from django_filters.filters import FilterMethod
from .typed_filter import TypedFilter from .typed_filter import TypedFilter
class ArrayFilterMethod(FilterMethod):
def __call__(self, qs, value):
if value is None:
return qs
return self.method(qs, self.f.field_name, value)
class ArrayFilter(TypedFilter): class ArrayFilter(TypedFilter):
""" """
Filter made for PostgreSQL ArrayField. Filter made for PostgreSQL ArrayField.
""" """
@TypedFilter.method.setter
def method(self, value):
"""
Override method setter so that in case a custom `method` is provided
(see documentation https://django-filter.readthedocs.io/en/stable/ref/filters.html#method),
it doesn't fall back to checking if the value is in `EMPTY_VALUES` (from the `__call__` method
of the `FilterMethod` class) and instead use our ArrayFilterMethod that consider empty lists as values.
Indeed when providing a `method` the `filter` method below is overridden and replaced by `FilterMethod(self)`
which means that the validation of the empty value is made by the `FilterMethod.__call__` method instead.
"""
TypedFilter.method.fset(self, value)
if value is not None:
self.filter = ArrayFilterMethod(self)
def filter(self, qs, value): def filter(self, qs, value):
""" """
Override the default filter class to check first whether the list is Override the default filter class to check first whether the list is


@ -1,12 +1,36 @@
from django_filters.filters import FilterMethod
from .typed_filter import TypedFilter from .typed_filter import TypedFilter
class ListFilterMethod(FilterMethod):
def __call__(self, qs, value):
if value is None:
return qs
return self.method(qs, self.f.field_name, value)
class ListFilter(TypedFilter): class ListFilter(TypedFilter):
""" """
Filter that takes a list of value as input. Filter that takes a list of value as input.
It is for example used for `__in` filters. It is for example used for `__in` filters.
""" """
@TypedFilter.method.setter
def method(self, value):
"""
Override method setter so that in case a custom `method` is provided
(see documentation https://django-filter.readthedocs.io/en/stable/ref/filters.html#method),
it doesn't fall back to checking if the value is in `EMPTY_VALUES` (from the `__call__` method
of the `FilterMethod` class) and instead use our ListFilterMethod that consider empty lists as values.
Indeed when providing a `method` the `filter` method below is overridden and replaced by `FilterMethod(self)`
which means that the validation of the empty value is made by the `FilterMethod.__call__` method instead.
"""
TypedFilter.method.fset(self, value)
if value is not None:
self.filter = ListFilterMethod(self)
def filter(self, qs, value): def filter(self, qs, value):
""" """
Override the default filter class to check first whether the list is Override the default filter class to check first whether the list is
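
(Aside: to make the behaviour described in the two docstrings above concrete, a simplified sketch of how the stock django-filter FilterMethod and the new ArrayFilterMethod/ListFilterMethod wrappers treat an empty list — the function names are illustrative; the real classes wrap a filter instance rather than plain functions.)

from django_filters.constants import EMPTY_VALUES

def stock_filter_method(method, qs, field_name, value):
    # django-filter's FilterMethod.__call__: [] is in EMPTY_VALUES,
    # so an empty list silently means "do not filter".
    if value in EMPTY_VALUES:
        return qs
    return method(qs, field_name, value)

def list_aware_filter_method(method, qs, field_name, value):
    # ArrayFilterMethod / ListFilterMethod: only None means "do not filter";
    # an empty list reaches the custom method, which can e.g. return qs.none().
    if value is None:
        return qs
    return method(qs, field_name, value)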


@ -1,4 +1,4 @@
from unittest.mock import MagicMock from functools import reduce
import pytest import pytest
from django.db import models from django.db import models
@ -25,15 +25,15 @@ else:
) )
STORE = {"events": []}
class Event(models.Model): class Event(models.Model):
name = models.CharField(max_length=50) name = models.CharField(max_length=50)
tags = ArrayField(models.CharField(max_length=50)) tags = ArrayField(models.CharField(max_length=50))
tag_ids = ArrayField(models.IntegerField()) tag_ids = ArrayField(models.IntegerField())
random_field = ArrayField(models.BooleanField()) random_field = ArrayField(models.BooleanField())
def __repr__(self):
return f"Event [{self.name}]"
@pytest.fixture @pytest.fixture
def EventFilterSet(): def EventFilterSet():
@ -44,10 +44,18 @@ def EventFilterSet():
"name": ["exact", "contains"], "name": ["exact", "contains"],
} }
# Those are actually usable with our Query fixture bellow # Those are actually usable with our Query fixture below
tags__contains = ArrayFilter(field_name="tags", lookup_expr="contains") tags__contains = ArrayFilter(field_name="tags", lookup_expr="contains")
tags__overlap = ArrayFilter(field_name="tags", lookup_expr="overlap") tags__overlap = ArrayFilter(field_name="tags", lookup_expr="overlap")
tags = ArrayFilter(field_name="tags", lookup_expr="exact") tags = ArrayFilter(field_name="tags", lookup_expr="exact")
tags__len = ArrayFilter(
field_name="tags", lookup_expr="len", input_type=graphene.Int
)
tags__len__in = ArrayFilter(
field_name="tags",
method="tags__len__in_filter",
input_type=graphene.List(graphene.Int),
)
# Those are actually not usable and only to check type declarations # Those are actually not usable and only to check type declarations
tags_ids__contains = ArrayFilter(field_name="tag_ids", lookup_expr="contains") tags_ids__contains = ArrayFilter(field_name="tag_ids", lookup_expr="contains")
@ -61,6 +69,14 @@ def EventFilterSet():
) )
random_field = ArrayFilter(field_name="random_field", lookup_expr="exact") random_field = ArrayFilter(field_name="random_field", lookup_expr="exact")
def tags__len__in_filter(self, queryset, _name, value):
if not value:
return queryset.none()
return reduce(
lambda q1, q2: q1.union(q2),
[queryset.filter(tags__len=v) for v in value],
).distinct()
return EventFilterSet return EventFilterSet
@ -83,68 +99,94 @@ def Query(EventType):
we are running unit tests in sqlite which does not have ArrayFields. we are running unit tests in sqlite which does not have ArrayFields.
""" """
events = [
Event(name="Live Show", tags=["concert", "music", "rock"]),
Event(name="Musical", tags=["movie", "music"]),
Event(name="Ballet", tags=["concert", "dance"]),
Event(name="Speech", tags=[]),
]
class Query(graphene.ObjectType): class Query(graphene.ObjectType):
events = DjangoFilterConnectionField(EventType) events = DjangoFilterConnectionField(EventType)
def resolve_events(self, info, **kwargs): def resolve_events(self, info, **kwargs):
events = [ class FakeQuerySet(QuerySet):
Event(name="Live Show", tags=["concert", "music", "rock"]), def __init__(self, model=None):
Event(name="Musical", tags=["movie", "music"]), self.model = Event
Event(name="Ballet", tags=["concert", "dance"]), self.__store = list(events)
Event(name="Speech", tags=[]),
]
STORE["events"] = events def all(self):
return self
m_queryset = MagicMock(spec=QuerySet) def filter(self, **kwargs):
m_queryset.model = Event queryset = FakeQuerySet()
queryset.__store = list(self.__store)
def filter_events(**kwargs): if "tags__contains" in kwargs:
if "tags__contains" in kwargs: queryset.__store = list(
STORE["events"] = list( filter(
filter( lambda e: set(kwargs["tags__contains"]).issubset(
lambda e: set(kwargs["tags__contains"]).issubset( set(e.tags)
set(e.tags) ),
), queryset.__store,
STORE["events"], )
) )
) if "tags__overlap" in kwargs:
if "tags__overlap" in kwargs: queryset.__store = list(
STORE["events"] = list( filter(
filter( lambda e: not set(kwargs["tags__overlap"]).isdisjoint(
lambda e: not set(kwargs["tags__overlap"]).isdisjoint( set(e.tags)
set(e.tags) ),
), queryset.__store,
STORE["events"], )
) )
) if "tags__exact" in kwargs:
if "tags__exact" in kwargs: queryset.__store = list(
STORE["events"] = list( filter(
filter( lambda e: set(kwargs["tags__exact"]) == set(e.tags),
lambda e: set(kwargs["tags__exact"]) == set(e.tags), queryset.__store,
STORE["events"], )
) )
) if "tags__len" in kwargs:
queryset.__store = list(
filter(
lambda e: len(e.tags) == kwargs["tags__len"],
queryset.__store,
)
)
return queryset
def mock_queryset_filter(*args, **kwargs): def union(self, *args):
filter_events(**kwargs) queryset = FakeQuerySet()
return m_queryset queryset.__store = self.__store
for arg in args:
queryset.__store += arg.__store
return queryset
def mock_queryset_none(*args, **kwargs): def none(self):
STORE["events"] = [] queryset = FakeQuerySet()
return m_queryset queryset.__store = []
return queryset
def mock_queryset_count(*args, **kwargs): def count(self):
return len(STORE["events"]) return len(self.__store)
m_queryset.all.return_value = m_queryset def distinct(self):
m_queryset.filter.side_effect = mock_queryset_filter queryset = FakeQuerySet()
m_queryset.none.side_effect = mock_queryset_none queryset.__store = []
m_queryset.count.side_effect = mock_queryset_count for event in self.__store:
m_queryset.__getitem__.side_effect = lambda index: STORE[ if event not in queryset.__store:
"events" queryset.__store.append(event)
].__getitem__(index) queryset.__store = sorted(queryset.__store, key=lambda e: e.name)
return queryset
return m_queryset def __getitem__(self, index):
return self.__store[index]
return FakeQuerySet()
return Query return Query
@pytest.fixture
def schema(Query):
return graphene.Schema(query=Query)


@ -1,18 +1,14 @@
import pytest import pytest
from graphene import Schema
from ...compat import ArrayField, MissingType from ...compat import ArrayField, MissingType
@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist") @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_contains_multiple(Query): def test_array_field_contains_multiple(schema):
""" """
Test contains filter on a array field of string. Test contains filter on a array field of string.
""" """
schema = Schema(query=Query)
query = """ query = """
query { query {
events (tags_Contains: ["concert", "music"]) { events (tags_Contains: ["concert", "music"]) {
@ -32,13 +28,11 @@ def test_array_field_contains_multiple(Query):
@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist") @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_contains_one(Query): def test_array_field_contains_one(schema):
""" """
Test contains filter on a array field of string. Test contains filter on a array field of string.
""" """
schema = Schema(query=Query)
query = """ query = """
query { query {
events (tags_Contains: ["music"]) { events (tags_Contains: ["music"]) {
@ -59,13 +53,11 @@ def test_array_field_contains_one(Query):
@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist") @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_contains_empty_list(Query): def test_array_field_contains_empty_list(schema):
""" """
Test contains filter on a array field of string. Test contains filter on a array field of string.
""" """
schema = Schema(query=Query)
query = """ query = """
query { query {
events (tags_Contains: []) { events (tags_Contains: []) {


@ -0,0 +1,186 @@
import pytest
from ...compat import ArrayField, MissingType
@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_len_filter(schema):
query = """
query {
events (tags_Len: 2) {
edges {
node {
name
}
}
}
}
"""
result = schema.execute(query)
assert not result.errors
assert result.data["events"]["edges"] == [
{"node": {"name": "Musical"}},
{"node": {"name": "Ballet"}},
]
query = """
query {
events (tags_Len: 0) {
edges {
node {
name
}
}
}
}
"""
result = schema.execute(query)
assert not result.errors
assert result.data["events"]["edges"] == [
{"node": {"name": "Speech"}},
]
query = """
query {
events (tags_Len: 10) {
edges {
node {
name
}
}
}
}
"""
result = schema.execute(query)
assert not result.errors
assert result.data["events"]["edges"] == []
query = """
query {
events (tags_Len: "2") {
edges {
node {
name
}
}
}
}
"""
result = schema.execute(query)
assert len(result.errors) == 1
assert result.errors[0].message == 'Int cannot represent non-integer value: "2"'
query = """
query {
events (tags_Len: True) {
edges {
node {
name
}
}
}
}
"""
result = schema.execute(query)
assert len(result.errors) == 1
assert result.errors[0].message == "Int cannot represent non-integer value: True"
@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_custom_filter(schema):
query = """
query {
events (tags_Len_In: 2) {
edges {
node {
name
}
}
}
}
"""
result = schema.execute(query)
assert not result.errors
assert result.data["events"]["edges"] == [
{"node": {"name": "Ballet"}},
{"node": {"name": "Musical"}},
]
query = """
query {
events (tags_Len_In: [0, 2]) {
edges {
node {
name
}
}
}
}
"""
result = schema.execute(query)
assert not result.errors
assert result.data["events"]["edges"] == [
{"node": {"name": "Ballet"}},
{"node": {"name": "Musical"}},
{"node": {"name": "Speech"}},
]
query = """
query {
events (tags_Len_In: [10]) {
edges {
node {
name
}
}
}
}
"""
result = schema.execute(query)
assert not result.errors
assert result.data["events"]["edges"] == []
query = """
query {
events (tags_Len_In: []) {
edges {
node {
name
}
}
}
}
"""
result = schema.execute(query)
assert not result.errors
assert result.data["events"]["edges"] == []
query = """
query {
events (tags_Len_In: "12") {
edges {
node {
name
}
}
}
}
"""
result = schema.execute(query)
assert len(result.errors) == 1
assert result.errors[0].message == 'Int cannot represent non-integer value: "12"'
query = """
query {
events (tags_Len_In: True) {
edges {
node {
name
}
}
}
}
"""
result = schema.execute(query)
assert len(result.errors) == 1
assert result.errors[0].message == "Int cannot represent non-integer value: True"


@ -1,18 +1,14 @@
import pytest import pytest
from graphene import Schema
from ...compat import ArrayField, MissingType from ...compat import ArrayField, MissingType
@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist") @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_exact_no_match(Query): def test_array_field_exact_no_match(schema):
""" """
Test exact filter on a array field of string. Test exact filter on a array field of string.
""" """
schema = Schema(query=Query)
query = """ query = """
query { query {
events (tags: ["concert", "music"]) { events (tags: ["concert", "music"]) {
@ -30,13 +26,11 @@ def test_array_field_exact_no_match(Query):
@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist") @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_exact_match(Query): def test_array_field_exact_match(schema):
""" """
Test exact filter on a array field of string. Test exact filter on a array field of string.
""" """
schema = Schema(query=Query)
query = """ query = """
query { query {
events (tags: ["movie", "music"]) { events (tags: ["movie", "music"]) {
@ -56,13 +50,11 @@ def test_array_field_exact_match(Query):
@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist") @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_exact_empty_list(Query): def test_array_field_exact_empty_list(schema):
""" """
Test exact filter on a array field of string. Test exact filter on a array field of string.
""" """
schema = Schema(query=Query)
query = """ query = """
query { query {
events (tags: []) { events (tags: []) {
@ -82,11 +74,10 @@ def test_array_field_exact_empty_list(Query):
@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist") @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_filter_schema_type(Query): def test_array_field_filter_schema_type(schema):
""" """
Check that the type in the filter is an array field like on the object type. Check that the type in the filter is an array field like on the object type.
""" """
schema = Schema(query=Query)
schema_str = str(schema) schema_str = str(schema)
assert ( assert (
@ -112,6 +103,8 @@ def test_array_field_filter_schema_type(Query):
"tags_Contains": "[String!]", "tags_Contains": "[String!]",
"tags_Overlap": "[String!]", "tags_Overlap": "[String!]",
"tags": "[String!]", "tags": "[String!]",
"tags_Len": "Int",
"tags_Len_In": "[Int]",
"tagsIds_Contains": "[Int!]", "tagsIds_Contains": "[Int!]",
"tagsIds_Overlap": "[Int!]", "tagsIds_Overlap": "[Int!]",
"tagsIds": "[Int!]", "tagsIds": "[Int!]",


@ -1,18 +1,14 @@
import pytest import pytest
from graphene import Schema
from ...compat import ArrayField, MissingType from ...compat import ArrayField, MissingType
@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist") @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_overlap_multiple(Query): def test_array_field_overlap_multiple(schema):
""" """
Test overlap filter on a array field of string. Test overlap filter on a array field of string.
""" """
schema = Schema(query=Query)
query = """ query = """
query { query {
events (tags_Overlap: ["concert", "music"]) { events (tags_Overlap: ["concert", "music"]) {
@ -34,13 +30,11 @@ def test_array_field_overlap_multiple(Query):
@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist") @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_overlap_one(Query): def test_array_field_overlap_one(schema):
""" """
Test overlap filter on a array field of string. Test overlap filter on a array field of string.
""" """
schema = Schema(query=Query)
query = """ query = """
query { query {
events (tags_Overlap: ["music"]) { events (tags_Overlap: ["music"]) {
@ -61,13 +55,11 @@ def test_array_field_overlap_one(Query):
@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist") @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_overlap_empty_list(Query): def test_array_field_overlap_empty_list(schema):
""" """
Test overlap filter on a array field of string. Test overlap filter on a array field of string.
""" """
schema = Schema(query=Query)
query = """ query = """
query { query {
events (tags_Overlap: []) { events (tags_Overlap: []) {


@ -789,7 +789,7 @@ def test_order_by():
query = """ query = """
query NodeFilteringQuery { query NodeFilteringQuery {
allReporters(orderBy: "-firtsnaMe") { allReporters(orderBy: "-firstname") {
edges { edges {
node { node {
firstName firstName
@ -802,7 +802,7 @@ def test_order_by():
assert result.errors assert result.errors
def test_order_by_is_perserved(): def test_order_by_is_preserved():
class ReporterType(DjangoObjectType): class ReporterType(DjangoObjectType):
class Meta: class Meta:
model = Reporter model = Reporter


@ -1,4 +1,8 @@
import operator
from functools import reduce
import pytest import pytest
from django.db.models import Q
from django_filters import FilterSet from django_filters import FilterSet
import graphene import graphene
@ -44,6 +48,10 @@ def schema():
only_first = TypedFilter( only_first = TypedFilter(
input_type=graphene.Boolean, method="only_first_filter" input_type=graphene.Boolean, method="only_first_filter"
) )
headline_search = ListFilter(
method="headline_search_filter",
input_type=graphene.List(graphene.String),
)
def first_n_filter(self, queryset, _name, value): def first_n_filter(self, queryset, _name, value):
return queryset[:value] return queryset[:value]
@ -54,6 +62,13 @@ def schema():
else: else:
return queryset return queryset
def headline_search_filter(self, queryset, _name, value):
if not value:
return queryset.none()
return queryset.filter(
reduce(operator.or_, [Q(headline__icontains=v) for v in value])
)
class ArticleType(DjangoObjectType): class ArticleType(DjangoObjectType):
class Meta: class Meta:
model = Article model = Article
@ -87,6 +102,7 @@ def test_typed_filter_schema(schema):
"lang_InStr": "[String]", "lang_InStr": "[String]",
"firstN": "Int", "firstN": "Int",
"onlyFirst": "Boolean", "onlyFirst": "Boolean",
"headlineSearch": "[String]",
} }
all_articles_filters = ( all_articles_filters = (
@ -104,24 +120,7 @@ def test_typed_filters_work(schema):
Article.objects.create(headline="A", reporter=reporter, editor=reporter, lang="es") Article.objects.create(headline="A", reporter=reporter, editor=reporter, lang="es")
Article.objects.create(headline="B", reporter=reporter, editor=reporter, lang="es") Article.objects.create(headline="B", reporter=reporter, editor=reporter, lang="es")
Article.objects.create(headline="C", reporter=reporter, editor=reporter, lang="en") Article.objects.create(headline="C", reporter=reporter, editor=reporter, lang="en")
Article.objects.create(headline="AB", reporter=reporter, editor=reporter, lang="es")
query = "query { articles (lang_In: [ES]) { edges { node { headline } } } }"
result = schema.execute(query)
assert not result.errors
assert result.data["articles"]["edges"] == [
{"node": {"headline": "A"}},
{"node": {"headline": "B"}},
]
query = 'query { articles (lang_InStr: ["es"]) { edges { node { headline } } } }'
result = schema.execute(query)
assert not result.errors
assert result.data["articles"]["edges"] == [
{"node": {"headline": "A"}},
{"node": {"headline": "B"}},
]
query = 'query { articles (lang_Contains: "n") { edges { node { headline } } } }' query = 'query { articles (lang_Contains: "n") { edges { node { headline } } } }'
@ -137,7 +136,7 @@ def test_typed_filters_work(schema):
assert not result.errors assert not result.errors
assert result.data["articles"]["edges"] == [ assert result.data["articles"]["edges"] == [
{"node": {"headline": "A"}}, {"node": {"headline": "A"}},
{"node": {"headline": "B"}}, {"node": {"headline": "AB"}},
] ]
query = "query { articles (onlyFirst: true) { edges { node { headline } } } }" query = "query { articles (onlyFirst: true) { edges { node { headline } } } }"
@ -147,3 +146,86 @@ def test_typed_filters_work(schema):
assert result.data["articles"]["edges"] == [ assert result.data["articles"]["edges"] == [
{"node": {"headline": "A"}}, {"node": {"headline": "A"}},
] ]
def test_list_filters_work(schema):
reporter = Reporter.objects.create(first_name="John", last_name="Doe", email="")
Article.objects.create(headline="A", reporter=reporter, editor=reporter, lang="es")
Article.objects.create(headline="B", reporter=reporter, editor=reporter, lang="es")
Article.objects.create(headline="C", reporter=reporter, editor=reporter, lang="en")
Article.objects.create(headline="AB", reporter=reporter, editor=reporter, lang="es")
query = "query { articles (lang_In: [ES]) { edges { node { headline } } } }"
result = schema.execute(query)
assert not result.errors
assert result.data["articles"]["edges"] == [
{"node": {"headline": "A"}},
{"node": {"headline": "AB"}},
{"node": {"headline": "B"}},
]
query = 'query { articles (lang_InStr: ["es"]) { edges { node { headline } } } }'
result = schema.execute(query)
assert not result.errors
assert result.data["articles"]["edges"] == [
{"node": {"headline": "A"}},
{"node": {"headline": "AB"}},
{"node": {"headline": "B"}},
]
query = "query { articles (lang_InStr: []) { edges { node { headline } } } }"
result = schema.execute(query)
assert not result.errors
assert result.data["articles"]["edges"] == []
query = "query { articles (lang_InStr: null) { edges { node { headline } } } }"
result = schema.execute(query)
assert not result.errors
assert result.data["articles"]["edges"] == [
{"node": {"headline": "A"}},
{"node": {"headline": "AB"}},
{"node": {"headline": "B"}},
{"node": {"headline": "C"}},
]
query = 'query { articles (headlineSearch: ["a", "B"]) { edges { node { headline } } } }'
result = schema.execute(query)
assert not result.errors
assert result.data["articles"]["edges"] == [
{"node": {"headline": "A"}},
{"node": {"headline": "AB"}},
{"node": {"headline": "B"}},
]
query = "query { articles (headlineSearch: []) { edges { node { headline } } } }"
result = schema.execute(query)
assert not result.errors
assert result.data["articles"]["edges"] == []
query = "query { articles (headlineSearch: null) { edges { node { headline } } } }"
result = schema.execute(query)
assert not result.errors
assert result.data["articles"]["edges"] == [
{"node": {"headline": "A"}},
{"node": {"headline": "AB"}},
{"node": {"headline": "B"}},
{"node": {"headline": "C"}},
]
query = 'query { articles (headlineSearch: [""]) { edges { node { headline } } } }'
result = schema.execute(query)
assert not result.errors
assert result.data["articles"]["edges"] == [
{"node": {"headline": "A"}},
{"node": {"headline": "AB"}},
{"node": {"headline": "B"}},
{"node": {"headline": "C"}},
]


@ -43,7 +43,7 @@ def get_filtering_args_from_filterset(filterset_class, type):
isinstance(filter_field, TypedFilter) isinstance(filter_field, TypedFilter)
and filter_field.input_type is not None and filter_field.input_type is not None
): ):
# First check if the filter input type has been explicitely given # First check if the filter input type has been explicitly given
field_type = filter_field.input_type field_type = filter_field.input_type
else: else:
if name not in filterset_class.declared_filters or isinstance( if name not in filterset_class.declared_filters or isinstance(
@ -145,7 +145,7 @@ def replace_csv_filters(filterset_class):
label=filter_field.label, label=filter_field.label,
method=filter_field.method, method=filter_field.method,
exclude=filter_field.exclude, exclude=filter_field.exclude,
**filter_field.extra **filter_field.extra,
) )
elif filter_type == "range": elif filter_type == "range":
filterset_class.base_filters[name] = RangeFilter( filterset_class.base_filters[name] = RangeFilter(
@ -154,5 +154,5 @@ def replace_csv_filters(filterset_class):
label=filter_field.label, label=filter_field.label,
method=filter_field.method, method=filter_field.method,
exclude=filter_field.exclude, exclude=filter_field.exclude,
**filter_field.extra **filter_field.extra,
) )


@ -23,8 +23,7 @@ def fields_for_form(form, only_fields, exclude_fields):
for name, field in form.fields.items(): for name, field in form.fields.items():
is_not_in_only = only_fields and name not in only_fields is_not_in_only = only_fields and name not in only_fields
is_excluded = ( is_excluded = (
name name in exclude_fields # or
in exclude_fields # or
# name in already_created_fields # name in already_created_fields
) )


@ -4,7 +4,7 @@ from graphene.types.inputobjecttype import InputObjectType
from graphene.utils.str_converters import to_camel_case from graphene.utils.str_converters import to_camel_case
from ..converter import BlankValueField from ..converter import BlankValueField
from ..types import ErrorType # noqa Import ErrorType for backwards compatability from ..types import ErrorType # noqa Import ErrorType for backwards compatibility
from .mutation import fields_for_form from .mutation import fields_for_form
@ -60,7 +60,7 @@ class DjangoFormInputObjectType(InputObjectType):
and isinstance(object_type._meta.fields[name], BlankValueField) and isinstance(object_type._meta.fields[name], BlankValueField)
): ):
# Field type BlankValueField here means that field # Field type BlankValueField here means that field
# with choises have been converted to enum # with choices have been converted to enum
# (BlankValueField is using only for that task ?) # (BlankValueField is using only for that task ?)
setattr(cls, name, cls.get_enum_cnv_cls_instance(name, object_type)) setattr(cls, name, cls.get_enum_cnv_cls_instance(name, object_type))
elif ( elif (


@ -19,6 +19,7 @@ class SerializerMutationOptions(MutationOptions):
model_class = None model_class = None
model_operations = ["create", "update"] model_operations = ["create", "update"]
serializer_class = None serializer_class = None
optional_fields = ()
def fields_for_serializer( def fields_for_serializer(
@ -28,6 +29,7 @@ def fields_for_serializer(
is_input=False, is_input=False,
convert_choices_to_enum=True, convert_choices_to_enum=True,
lookup_field=None, lookup_field=None,
optional_fields=(),
): ):
fields = OrderedDict() fields = OrderedDict()
for name, field in serializer.fields.items(): for name, field in serializer.fields.items():
@ -48,9 +50,13 @@ def fields_for_serializer(
if is_not_in_only or is_excluded: if is_not_in_only or is_excluded:
continue continue
is_optional = name in optional_fields or "__all__" in optional_fields
fields[name] = convert_serializer_field( fields[name] = convert_serializer_field(
field, is_input=is_input, convert_choices_to_enum=convert_choices_to_enum field,
is_input=is_input,
convert_choices_to_enum=convert_choices_to_enum,
force_optional=is_optional,
) )
return fields return fields
@ -74,7 +80,8 @@ class SerializerMutation(ClientIDMutation):
exclude_fields=(), exclude_fields=(),
convert_choices_to_enum=True, convert_choices_to_enum=True,
_meta=None, _meta=None,
**options optional_fields=(),
**options,
): ):
if not serializer_class: if not serializer_class:
raise Exception("serializer_class is required for the SerializerMutation") raise Exception("serializer_class is required for the SerializerMutation")
@ -98,6 +105,7 @@ class SerializerMutation(ClientIDMutation):
is_input=True, is_input=True,
convert_choices_to_enum=convert_choices_to_enum, convert_choices_to_enum=convert_choices_to_enum,
lookup_field=lookup_field, lookup_field=lookup_field,
optional_fields=optional_fields,
) )
output_fields = fields_for_serializer( output_fields = fields_for_serializer(
serializer, serializer,
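
(Aside: a minimal usage sketch for the new optional_fields Meta option — the serializer is the MyModelSerializer used in the tests below; per fields_for_serializer above, listed fields, or "__all__", are converted as non-required even when the serializer requires them.)

from graphene_django.rest_framework.mutation import SerializerMutation

class MyMutation(SerializerMutation):
    class Meta:
        serializer_class = MyModelSerializer  # DRF ModelSerializer from the test suite
        # "cool_name" stays on the Input type but is no longer required;
        # optional_fields = ("__all__",) would relax every field.
        optional_fields = ("cool_name",)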


@ -18,7 +18,9 @@ def get_graphene_type_from_serializer_field(field):
) )
def convert_serializer_field(field, is_input=True, convert_choices_to_enum=True): def convert_serializer_field(
field, is_input=True, convert_choices_to_enum=True, force_optional=False
):
""" """
Converts a django rest frameworks field to a graphql field Converts a django rest frameworks field to a graphql field
and marks the field as required if we are creating an input type and marks the field as required if we are creating an input type
@ -31,7 +33,10 @@ def convert_serializer_field(field, is_input=True, convert_choices_to_enum=True)
graphql_type = get_graphene_type_from_serializer_field(field) graphql_type = get_graphene_type_from_serializer_field(field)
args = [] args = []
kwargs = {"description": field.help_text, "required": is_input and field.required} kwargs = {
"description": field.help_text,
"required": is_input and field.required and not force_optional,
}
# if it is a tuple or a list it means that we are returning # if it is a tuple or a list it means that we are returning
# the graphql type and the child type # the graphql type and the child type


@ -3,7 +3,7 @@ import datetime
from pytest import raises from pytest import raises
from rest_framework import serializers from rest_framework import serializers
from graphene import Field, ResolveInfo from graphene import Field, ResolveInfo, String
from graphene.types.inputobjecttype import InputObjectType from graphene.types.inputobjecttype import InputObjectType
from ...types import DjangoObjectType from ...types import DjangoObjectType
@ -105,6 +105,16 @@ def test_exclude_fields():
assert "created" not in MyMutation.Input._meta.fields assert "created" not in MyMutation.Input._meta.fields
def test_model_serializer_optional_fields():
class MyMutation(SerializerMutation):
class Meta:
serializer_class = MyModelSerializer
optional_fields = ("cool_name",)
assert "cool_name" in MyMutation.Input._meta.fields
assert MyMutation.Input._meta.fields["cool_name"].type == String
def test_write_only_field(): def test_write_only_field():
class WriteOnlyFieldModelSerializer(serializers.ModelSerializer): class WriteOnlyFieldModelSerializer(serializers.ModelSerializer):
password = serializers.CharField(write_only=True) password = serializers.CharField(write_only=True)
@ -265,7 +275,7 @@ def test_perform_mutate_success():
result = MyMethodMutation.mutate_and_get_payload( result = MyMethodMutation.mutate_and_get_payload(
None, None,
mock_info(), mock_info(),
**{"cool_name": "Narf", "last_edited": datetime.date(2020, 1, 4)} **{"cool_name": "Narf", "last_edited": datetime.date(2020, 1, 4)},
) )
assert result.errors is None assert result.errors is None


@ -42,6 +42,7 @@ DEFAULTS = {
# https://github.com/graphql/graphiql/tree/main/packages/graphiql#options # https://github.com/graphql/graphiql/tree/main/packages/graphiql#options
"GRAPHIQL_HEADER_EDITOR_ENABLED": True, "GRAPHIQL_HEADER_EDITOR_ENABLED": True,
"GRAPHIQL_SHOULD_PERSIST_HEADERS": False, "GRAPHIQL_SHOULD_PERSIST_HEADERS": False,
"GRAPHIQL_INPUT_VALUE_DEPRECATION": False,
"ATOMIC_MUTATIONS": False, "ATOMIC_MUTATIONS": False,
"TESTING_ENDPOINT": "/graphql", "TESTING_ENDPOINT": "/graphql",
} }


@ -122,6 +122,7 @@
onEditOperationName: onEditOperationName, onEditOperationName: onEditOperationName,
isHeadersEditorEnabled: GRAPHENE_SETTINGS.graphiqlHeaderEditorEnabled, isHeadersEditorEnabled: GRAPHENE_SETTINGS.graphiqlHeaderEditorEnabled,
shouldPersistHeaders: GRAPHENE_SETTINGS.graphiqlShouldPersistHeaders, shouldPersistHeaders: GRAPHENE_SETTINGS.graphiqlShouldPersistHeaders,
inputValueDeprecation: GRAPHENE_SETTINGS.graphiqlInputValueDeprecation,
query: query, query: query,
}; };
if (parameters.variables) { if (parameters.variables) {


@ -54,6 +54,7 @@ add "&raw" to the end of the URL within a browser.
{% endif %} {% endif %}
graphiqlHeaderEditorEnabled: {{ graphiql_header_editor_enabled|yesno:"true,false" }}, graphiqlHeaderEditorEnabled: {{ graphiql_header_editor_enabled|yesno:"true,false" }},
graphiqlShouldPersistHeaders: {{ graphiql_should_persist_headers|yesno:"true,false" }}, graphiqlShouldPersistHeaders: {{ graphiql_should_persist_headers|yesno:"true,false" }},
graphiqlInputValueDeprecation: {{ graphiql_input_value_deprecation|yesno:"true,false" }},
}; };
</script> </script>
<script src="{% static 'graphene_django/graphiql.js' %}"></script> <script src="{% static 'graphene_django/graphiql.js' %}"></script>


@ -97,7 +97,7 @@ class CNNReporter(Reporter):
class APNewsReporter(Reporter): class APNewsReporter(Reporter):
""" """
This class only inherits from Reporter for testing multi table inheritence This class only inherits from Reporter for testing multi table inheritance
similar to what you'd see in django-polymorphic similar to what you'd see in django-polymorphic
""" """


@ -102,10 +102,8 @@ def validate_fields(type_, model, fields, only_fields, exclude_fields):
if name in all_field_names: if name in all_field_names:
# Field is a custom field # Field is a custom field
warnings.warn( warnings.warn(
( f'Excluding the custom field "{name}" on DjangoObjectType "{type_}" has no effect. '
'Excluding the custom field "{field_name}" on DjangoObjectType "{type_}" has no effect. ' 'Either remove the custom field or remove the field from the "exclude" list.'
'Either remove the custom field or remove the field from the "exclude" list.'
).format(field_name=name, type_=type_)
) )
else: else:
if not hasattr(model, name): if not hasattr(model, name):


@ -4,6 +4,7 @@ import warnings
from django.test import Client, TestCase, TransactionTestCase from django.test import Client, TestCase, TransactionTestCase
from graphene_django.settings import graphene_settings from graphene_django.settings import graphene_settings
from graphene_django.utils.utils import _DJANGO_VERSION_AT_LEAST_4_2
DEFAULT_GRAPHQL_URL = "/graphql" DEFAULT_GRAPHQL_URL = "/graphql"
@ -55,8 +56,14 @@ def graphql_query(
else: else:
body["variables"] = {"input": input_data} body["variables"] = {"input": input_data}
if headers: if headers:
header_params = (
{"headers": headers} if _DJANGO_VERSION_AT_LEAST_4_2 else headers
)
resp = client.post( resp = client.post(
graphql_url, json.dumps(body), content_type="application/json", **headers graphql_url,
json.dumps(body),
content_type="application/json",
**header_params,
) )
else: else:
resp = client.post( resp = client.post(


@ -1,5 +1,6 @@
import inspect import inspect
import django
from django.db import connection, models, transaction from django.db import connection, models, transaction
from django.db.models.manager import Manager from django.db.models.manager import Manager
from django.utils.encoding import force_str from django.utils.encoding import force_str
@ -145,3 +146,8 @@ def bypass_get_queryset(resolver):
""" """
resolver._bypass_get_queryset = True resolver._bypass_get_queryset = True
return resolver return resolver
_DJANGO_VERSION_AT_LEAST_4_2 = django.VERSION[0] > 4 or (
django.VERSION[0] >= 4 and django.VERSION[1] >= 2
)


@ -9,10 +9,17 @@ from django.shortcuts import render
from django.utils.decorators import method_decorator from django.utils.decorators import method_decorator
from django.views.decorators.csrf import ensure_csrf_cookie from django.views.decorators.csrf import ensure_csrf_cookie
from django.views.generic import View from django.views.generic import View
from graphql import OperationType, get_operation_ast, parse from graphql import (
ExecutionResult,
OperationType,
execute,
get_operation_ast,
parse,
validate_schema,
)
from graphql.error import GraphQLError from graphql.error import GraphQLError
from graphql.execution import ExecutionResult
from graphql.execution.middleware import MiddlewareManager from graphql.execution.middleware import MiddlewareManager
from graphql.validation import validate
from graphene import Schema from graphene import Schema
from graphene_django.constants import MUTATION_ERRORS_FLAG from graphene_django.constants import MUTATION_ERRORS_FLAG
@ -167,11 +174,13 @@ class GraphQLView(View):
subscriptions_transport_ws_sri=self.subscriptions_transport_ws_sri, subscriptions_transport_ws_sri=self.subscriptions_transport_ws_sri,
graphiql_plugin_explorer_version=self.graphiql_plugin_explorer_version, graphiql_plugin_explorer_version=self.graphiql_plugin_explorer_version,
graphiql_plugin_explorer_sri=self.graphiql_plugin_explorer_sri, graphiql_plugin_explorer_sri=self.graphiql_plugin_explorer_sri,
graphiql_plugin_explorer_css_sri=self.graphiql_plugin_explorer_css_sri,
# The SUBSCRIPTION_PATH setting. # The SUBSCRIPTION_PATH setting.
subscription_path=self.subscription_path, subscription_path=self.subscription_path,
# GraphiQL headers tab, # GraphiQL headers tab,
graphiql_header_editor_enabled=graphene_settings.GRAPHIQL_HEADER_EDITOR_ENABLED, graphiql_header_editor_enabled=graphene_settings.GRAPHIQL_HEADER_EDITOR_ENABLED,
graphiql_should_persist_headers=graphene_settings.GRAPHIQL_SHOULD_PERSIST_HEADERS, graphiql_should_persist_headers=graphene_settings.GRAPHIQL_SHOULD_PERSIST_HEADERS,
graphiql_input_value_deprecation=graphene_settings.GRAPHIQL_INPUT_VALUE_DEPRECATION,
) )
if self.batch: if self.batch:
@ -293,43 +302,56 @@ class GraphQLView(View):
return None return None
raise HttpError(HttpResponseBadRequest("Must provide query string.")) raise HttpError(HttpResponseBadRequest("Must provide query string."))
schema = self.schema.graphql_schema
schema_validation_errors = validate_schema(schema)
if schema_validation_errors:
return ExecutionResult(data=None, errors=schema_validation_errors)
try: try:
document = parse(query) document = parse(query)
except Exception as e: except Exception as e:
return ExecutionResult(errors=[e]) return ExecutionResult(errors=[e])
if request.method.lower() == "get": operation_ast = get_operation_ast(document, operation_name)
operation_ast = get_operation_ast(document, operation_name)
if operation_ast and operation_ast.operation != OperationType.QUERY:
if show_graphiql:
return None
raise HttpError( if (
HttpResponseNotAllowed( request.method.lower() == "get"
["POST"], and operation_ast is not None
"Can only perform a {} operation from a POST request.".format( and operation_ast.operation != OperationType.QUERY
operation_ast.operation.value ):
), if show_graphiql:
) return None
raise HttpError(
HttpResponseNotAllowed(
["POST"],
"Can only perform a {} operation from a POST request.".format(
operation_ast.operation.value
),
) )
try: )
extra_options = {}
if self.execution_context_class:
extra_options["execution_context_class"] = self.execution_context_class
options = { validation_errors = validate(schema, document)
"source": query,
if validation_errors:
return ExecutionResult(data=None, errors=validation_errors)
try:
execute_options = {
"root_value": self.get_root_value(request), "root_value": self.get_root_value(request),
"context_value": self.get_context(request),
"variable_values": variables, "variable_values": variables,
"operation_name": operation_name, "operation_name": operation_name,
"context_value": self.get_context(request),
"middleware": self.get_middleware(request), "middleware": self.get_middleware(request),
} }
options.update(extra_options) if self.execution_context_class:
execute_options[
"execution_context_class"
] = self.execution_context_class
operation_ast = get_operation_ast(document, operation_name)
if ( if (
operation_ast operation_ast is not None
and operation_ast.operation == OperationType.MUTATION and operation_ast.operation == OperationType.MUTATION
and ( and (
graphene_settings.ATOMIC_MUTATIONS is True graphene_settings.ATOMIC_MUTATIONS is True
@ -337,12 +359,12 @@ class GraphQLView(View):
) )
): ):
with transaction.atomic(): with transaction.atomic():
result = self.schema.execute(**options) result = execute(schema, document, **execute_options)
if getattr(request, MUTATION_ERRORS_FLAG, False) is True: if getattr(request, MUTATION_ERRORS_FLAG, False) is True:
transaction.set_rollback(True) transaction.set_rollback(True)
return result return result
return self.schema.execute(**options) return execute(schema, document, **execute_options)
except Exception as e: except Exception as e:
return ExecutionResult(errors=[e]) return ExecutionResult(errors=[e])
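
(Aside: in outline, execute_graphql_request now drives the plain graphql-core pipeline instead of Schema.execute; a condensed sketch with error handling, the GET-method check and the atomic-mutation branch omitted — the helper name is illustrative.)

from graphql import ExecutionResult, execute, parse, validate, validate_schema

def run_query(graphene_schema, query, variables=None, operation_name=None, context=None):
    schema = graphene_schema.graphql_schema           # underlying GraphQLSchema
    schema_errors = validate_schema(schema)           # 1. the schema itself must be valid
    if schema_errors:
        return ExecutionResult(data=None, errors=schema_errors)
    document = parse(query)                           # 2. parse the request document
    validation_errors = validate(schema, document)    # 3. validate the document against the schema
    if validation_errors:
        return ExecutionResult(data=None, errors=validation_errors)
    return execute(                                   # 4. execute the parsed document
        schema,
        document,
        variable_values=variables,
        operation_name=operation_name,
        context_value=context,
    )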


@ -26,8 +26,7 @@ tests_require = [
dev_requires = [ dev_requires = [
"black==23.7.0", "ruff==0.1.2",
"ruff==0.0.283",
"pre-commit", "pre-commit",
] + tests_require ] + tests_require
@ -50,6 +49,7 @@ setup(
"Programming Language :: Python :: 3.9", "Programming Language :: Python :: 3.9",
"Programming Language :: Python :: 3.10", "Programming Language :: Python :: 3.10",
"Programming Language :: Python :: 3.11", "Programming Language :: Python :: 3.11",
"Programming Language :: Python :: 3.12",
"Programming Language :: Python :: Implementation :: PyPy", "Programming Language :: Python :: Implementation :: PyPy",
"Framework :: Django", "Framework :: Django",
"Framework :: Django :: 3.2", "Framework :: Django :: 3.2",

tox.ini

@ -1,8 +1,9 @@
[tox] [tox]
envlist = envlist =
py{38,39,310}-django32 py{38,39,310}-django32
py{38,39}-django{41,42} py{38,39}-django42
py{310,311}-django{41,42,main} py{310,311}-django{42,50,main}
py312-django{42,50,main}
pre-commit pre-commit
[gh-actions] [gh-actions]
@ -11,12 +12,13 @@ python =
3.9: py39 3.9: py39
3.10: py310 3.10: py310
3.11: py311 3.11: py311
3.12: py312
[gh-actions:env] [gh-actions:env]
DJANGO = DJANGO =
3.2: django32 3.2: django32
4.1: django41
4.2: django42 4.2: django42
5.0: django50
main: djangomain main: djangomain
[testenv] [testenv]
@ -29,8 +31,8 @@ deps =
-e.[test] -e.[test]
psycopg2-binary psycopg2-binary
django32: Django>=3.2,<4.0 django32: Django>=3.2,<4.0
django41: Django>=4.1,<4.2
django42: Django>=4.2,<4.3 django42: Django>=4.2,<4.3
django50: Django>=5.0,<5.1
djangomain: https://github.com/django/django/archive/main.zip djangomain: https://github.com/django/django/archive/main.zip
commands = {posargs:pytest --cov=graphene_django graphene_django examples} commands = {posargs:pytest --cov=graphene_django graphene_django examples}