Mirror of https://github.com/graphql-python/graphene-django.git (synced 2025-04-12 13:14:23 +03:00)

Commit d947a75d1e: Merge branch 'main' into setting-convert-choices-to-enum

.github/workflows/deploy.yml (vendored, 7 changes)

@@ -6,8 +6,13 @@ on:
       - 'v*'
 
 jobs:
-  build:
+  lint:
+    uses: ./.github/workflows/lint.yml
+  tests:
+    uses: ./.github/workflows/tests.yml
+  release:
     runs-on: ubuntu-latest
+    needs: [lint, tests]
 
     steps:
       - uses: actions/checkout@v3

.github/workflows/lint.yml (vendored, 1 change)

@@ -4,6 +4,7 @@ on:
   push:
     branches: ["main"]
   pull_request:
+  workflow_call:
 
 jobs:
   build:

.github/workflows/tests.yml (vendored, 17 changes)

@@ -4,6 +4,7 @@ on:
   push:
     branches: ["main"]
   pull_request:
+  workflow_call:
 
 jobs:
   build:

@@ -11,13 +12,17 @@ jobs:
     strategy:
       max-parallel: 4
       matrix:
-        django: ["3.2", "4.1", "4.2"]
-        python-version: ["3.8", "3.9", "3.10"]
-        include:
-          - django: "4.1"
-            python-version: "3.11"
-          - django: "4.2"
-            python-version: "3.11"
+        django: ["3.2", "4.2", "5.0"]
+        python-version: ["3.8", "3.9", "3.10", "3.11", "3.12"]
+        exclude:
+          - django: "3.2"
+            python-version: "3.11"
+          - django: "3.2"
+            python-version: "3.12"
+          - django: "5.0"
+            python-version: "3.8"
+          - django: "5.0"
+            python-version: "3.9"
     steps:
       - uses: actions/checkout@v3
       - name: Set up Python ${{ matrix.python-version }}

@@ -2,7 +2,7 @@ default_language_version:
   python: python3.11
 repos:
   - repo: https://github.com/pre-commit/pre-commit-hooks
-    rev: v4.4.0
+    rev: v4.5.0
     hooks:
       - id: check-merge-conflict
       - id: check-json

@@ -15,12 +15,9 @@ repos:
           - --autofix
       - id: trailing-whitespace
         exclude: README.md
-  - repo: https://github.com/psf/black
-    rev: 23.7.0
-    hooks:
-      - id: black
   - repo: https://github.com/astral-sh/ruff-pre-commit
-    rev: v0.0.283
+    rev: v0.1.2
     hooks:
       - id: ruff
        args: [--fix, --exit-non-zero-on-fix, --show-fixes]
+      - id: ruff-format

@@ -13,6 +13,7 @@ ignore = [
     "B017",  # pytest.raises(Exception) should be considered evil
     "B028",  # warnings.warn called without an explicit stacklevel keyword argument
     "B904",  # check for raise statements in exception handlers that lack a from clause
+    "W191",  # https://docs.astral.sh/ruff/formatter/#conflicting-lint-rules
 ]
 
 exclude = [

@@ -29,5 +30,4 @@ target-version = "py38"
 [isort]
 known-first-party = ["graphene", "graphene-django"]
 known-local-folder = ["cookbook"]
-force-wrap-aliases = true
 combine-as-imports = true

Makefile (2 changes)

@@ -14,7 +14,7 @@ tests:
 
 .PHONY: format ## Format code
 format:
-	black graphene_django examples setup.py
+	ruff format graphene_django examples setup.py
 
 .PHONY: lint ## Lint code
 lint:

@@ -6,7 +6,7 @@ Graphene-Django can be customised using settings. This page explains each setting
 Usage
 -----
 
-Add settings to your Django project by creating a Dictonary with name ``GRAPHENE`` in the project's ``settings.py``:
+Add settings to your Django project by creating a Dictionary with name ``GRAPHENE`` in the project's ``settings.py``:
 
 .. code:: python
 

@@ -206,9 +206,6 @@ Set to ``False`` if you want to disable GraphiQL headers editor tab for some reason.
 
 This setting is passed to ``headerEditorEnabled`` GraphiQL options, for details refer to GraphiQLDocs_.
 
-.. _GraphiQLDocs: https://github.com/graphql/graphiql/tree/main/packages/graphiql#options
-
-
 Default: ``True``
 
 .. code:: python
 

@@ -239,8 +236,6 @@ Set to ``True`` if you want to persist GraphiQL headers after refreshing the page.
 
 This setting is passed to ``shouldPersistHeaders`` GraphiQL options, for details refer to GraphiQLDocs_.
 
-.. _GraphiQLDocs: https://github.com/graphql/graphiql/tree/main/packages/graphiql#options
-
 
 Default: ``False``
 

@@ -249,3 +244,37 @@ Default: ``False``
     GRAPHENE = {
        'GRAPHIQL_SHOULD_PERSIST_HEADERS': False,
     }
+
+
+``GRAPHIQL_INPUT_VALUE_DEPRECATION``
+------------------------------------
+
+Set to ``True`` if you want GraphiQL to show any deprecated fields on input object types' docs.
+
+For example, having this schema:
+
+.. code:: python
+
+    class MyMutationInputType(graphene.InputObjectType):
+        old_field = graphene.String(deprecation_reason="You should now use 'newField' instead.")
+        new_field = graphene.String()
+
+    class MyMutation(graphene.Mutation):
+        class Arguments:
+            input = types.MyMutationInputType()
+
+GraphiQL will add a ``Show Deprecated Fields`` button to toggle information display on ``oldField`` and its deprecation
+reason. Otherwise, you would get neither a button nor any information at all on ``oldField``.
+
+This setting is passed to ``inputValueDeprecation`` GraphiQL options, for details refer to GraphiQLDocs_.
+
+Default: ``False``
+
+.. code:: python
+
+    GRAPHENE = {
+       'GRAPHIQL_INPUT_VALUE_DEPRECATION': False,
+    }
+
+
+.. _GraphiQLDocs: https://graphiql-test.netlify.app/typedoc/modules/graphiql_react#graphiqlprovider-2

@@ -231,7 +231,7 @@
         "fields": {
             "category": 3,
             "name": "Newt",
-            "notes": "Braised and Confuesd"
+            "notes": "Braised and Confused"
         },
         "model": "ingredients.ingredient",
         "pk": 5

@@ -28,7 +28,7 @@ def initialize():
 
     # Yeah, technically it's Corellian. But it flew in the service of the rebels,
     # so for the purposes of this demo it's a rebel ship.
-    falcon = Ship(id="4", name="Millenium Falcon", faction=rebels)
+    falcon = Ship(id="4", name="Millennium Falcon", faction=rebels)
     falcon.save()
 
     homeOne = Ship(id="5", name="Home One", faction=rebels)

@@ -40,7 +40,7 @@ def test_mutations():
         {"node": {"id": "U2hpcDox", "name": "X-Wing"}},
         {"node": {"id": "U2hpcDoy", "name": "Y-Wing"}},
         {"node": {"id": "U2hpcDoz", "name": "A-Wing"}},
-        {"node": {"id": "U2hpcDo0", "name": "Millenium Falcon"}},
+        {"node": {"id": "U2hpcDo0", "name": "Millennium Falcon"}},
         {"node": {"id": "U2hpcDo1", "name": "Home One"}},
         {"node": {"id": "U2hpcDo5", "name": "Peter"}},
     ]

@@ -1,3 +1,6 @@
+import sys
+from pathlib import PurePath
+
 # For backwards compatibility, we import JSONField to have it available for import via
 # this compat module (https://github.com/graphql-python/graphene-django/issues/1428).
 # Django's JSONField is available in Django 3.2+ (the minimum version we support)

@@ -19,4 +22,23 @@ try:
         RangeField,
     )
 except ImportError:
-    IntegerRangeField, ArrayField, HStoreField, RangeField = (MissingType,) * 4
+    IntegerRangeField, HStoreField, RangeField = (MissingType,) * 3
+
+    # For unit tests we fake ArrayField using JSONFields
+    if any(
+        PurePath(sys.argv[0]).match(p)
+        for p in [
+            "**/pytest",
+            "**/py.test",
+            "**/pytest/__main__.py",
+        ]
+    ):
+
+        class ArrayField(JSONField):
+            def __init__(self, *args, **kwargs):
+                if len(args) > 0:
+                    self.base_field = args[0]
+                super().__init__(**kwargs)
+
+    else:
+        ArrayField = MissingType

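The change above aliases ``ArrayField`` to a ``JSONField``-backed fake whenever the process is started by pytest, so array-backed models can be declared even though the test suite runs on SQLite. A minimal sketch of what that enables; the ``Event`` model and app label are illustrative, and the guard mirrors the ``skipif`` pattern used in the tests later in this diff:

.. code:: python

    # Sketch: under pytest the compat module exposes a JSONField-backed ArrayField,
    # so a model with an array column can be declared and exercised on SQLite.
    from django.db import models

    from graphene_django.compat import ArrayField, MissingType

    if ArrayField is not MissingType:

        class Event(models.Model):
            name = models.CharField(max_length=50)
            # JSON-backed under the fake, a real PostgreSQL array otherwise.
            tags = ArrayField(models.CharField(max_length=50))

            class Meta:
                app_label = "tests"  # illustrative app label
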
@@ -194,7 +194,7 @@ class DjangoConnectionField(ConnectionField):
         enforce_first_or_last,
         root,
         info,
-        **args
+        **args,
     ):
         first = args.get("first")
         last = args.get("last")

@@ -36,7 +36,7 @@ class DjangoFilterConnectionField(DjangoConnectionField):
         extra_filter_meta=None,
         filterset_class=None,
         *args,
-        **kwargs
+        **kwargs,
     ):
         self._fields = fields
         self._provided_filterset_class = filterset_class

@@ -1,13 +1,36 @@
 from django_filters.constants import EMPTY_VALUES
+from django_filters.filters import FilterMethod
 
 from .typed_filter import TypedFilter
 
 
+class ArrayFilterMethod(FilterMethod):
+    def __call__(self, qs, value):
+        if value is None:
+            return qs
+        return self.method(qs, self.f.field_name, value)
+
+
 class ArrayFilter(TypedFilter):
     """
     Filter made for PostgreSQL ArrayField.
     """
 
+    @TypedFilter.method.setter
+    def method(self, value):
+        """
+        Override method setter so that in case a custom `method` is provided
+        (see documentation https://django-filter.readthedocs.io/en/stable/ref/filters.html#method),
+        it doesn't fall back to checking if the value is in `EMPTY_VALUES` (from the `__call__` method
+        of the `FilterMethod` class) and instead use our ArrayFilterMethod that consider empty lists as values.
+
+        Indeed when providing a `method` the `filter` method below is overridden and replaced by `FilterMethod(self)`
+        which means that the validation of the empty value is made by the `FilterMethod.__call__` method instead.
+        """
+        TypedFilter.method.fset(self, value)
+        if value is not None:
+            self.filter = ArrayFilterMethod(self)
+
     def filter(self, qs, value):
         """
         Override the default filter class to check first whether the list is

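``ArrayFilter`` above keeps empty lists as meaningful values when a custom ``method`` is attached. A hedged usage sketch; the ``Event`` model is hypothetical, and the import path is assumed to match the other typed filters exported by ``graphene_django.filter``:

.. code:: python

    # Sketch of a FilterSet wired to ArrayFilter on a PostgreSQL ArrayField.
    import graphene
    from django.contrib.postgres.fields import ArrayField as PGArrayField
    from django.db import models
    from django_filters import FilterSet

    from graphene_django.filter import ArrayFilter  # assumed export path


    class Event(models.Model):
        # Hypothetical model for the sketch.
        name = models.CharField(max_length=50)
        tags = PGArrayField(models.CharField(max_length=50))


    class EventFilterSet(FilterSet):
        class Meta:
            model = Event
            fields = {"name": ["exact", "contains"]}

        # Exposed in the schema as tags_Contains / tags_Overlap list arguments.
        tags__contains = ArrayFilter(field_name="tags", lookup_expr="contains")
        tags__overlap = ArrayFilter(field_name="tags", lookup_expr="overlap")
        # input_type overrides the GraphQL argument type for the scalar "len" lookup.
        tags__len = ArrayFilter(
            field_name="tags", lookup_expr="len", input_type=graphene.Int
        )
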
@@ -1,12 +1,36 @@
+from django_filters.filters import FilterMethod
+
 from .typed_filter import TypedFilter
 
 
+class ListFilterMethod(FilterMethod):
+    def __call__(self, qs, value):
+        if value is None:
+            return qs
+        return self.method(qs, self.f.field_name, value)
+
+
 class ListFilter(TypedFilter):
     """
     Filter that takes a list of value as input.
     It is for example used for `__in` filters.
     """
 
+    @TypedFilter.method.setter
+    def method(self, value):
+        """
+        Override method setter so that in case a custom `method` is provided
+        (see documentation https://django-filter.readthedocs.io/en/stable/ref/filters.html#method),
+        it doesn't fall back to checking if the value is in `EMPTY_VALUES` (from the `__call__` method
+        of the `FilterMethod` class) and instead use our ListFilterMethod that consider empty lists as values.
+
+        Indeed when providing a `method` the `filter` method below is overridden and replaced by `FilterMethod(self)`
+        which means that the validation of the empty value is made by the `FilterMethod.__call__` method instead.
+        """
+        TypedFilter.method.fset(self, value)
+        if value is not None:
+            self.filter = ListFilterMethod(self)
+
     def filter(self, qs, value):
         """
         Override the default filter class to check first whether the list is

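``ListFilter`` is the list-argument counterpart used for ``__in`` style lookups. As the tests later in this diff show, passing ``null`` leaves the queryset unfiltered, while an empty list reaches an attached custom ``method`` (which can then return no results). A sketch under those assumptions; ``Article`` is a hypothetical model and the import path is assumed:

.. code:: python

    # Sketch: "in" lookups are exposed as list arguments (e.g. lang_In) through
    # ListFilter, via replace_csv_filters() in graphene_django/filter/utils.py.
    import graphene
    from django.db import models
    from django_filters import FilterSet

    from graphene_django.filter import ListFilter  # assumed export path


    class Article(models.Model):
        # Hypothetical model for the sketch.
        headline = models.CharField(max_length=100)
        lang = models.CharField(max_length=2)


    class ArticleFilterSet(FilterSet):
        class Meta:
            model = Article
            fields = {"lang": ["exact", "in"]}

        # A declared ListFilter with a custom method: an empty list reaches the
        # method (returning nothing here), while null skips filtering entirely.
        headline_search = ListFilter(
            method="headline_search_filter",
            input_type=graphene.List(graphene.String),
        )

        def headline_search_filter(self, queryset, _name, value):
            if not value:
                return queryset.none()
            return queryset.filter(headline__icontains=value[0])
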
@@ -1,4 +1,4 @@
-from unittest.mock import MagicMock
+from functools import reduce
 
 import pytest
 from django.db import models

@@ -25,15 +25,15 @@ else:
     )
 
 
-STORE = {"events": []}
-
-
 class Event(models.Model):
     name = models.CharField(max_length=50)
     tags = ArrayField(models.CharField(max_length=50))
     tag_ids = ArrayField(models.IntegerField())
     random_field = ArrayField(models.BooleanField())
 
+    def __repr__(self):
+        return f"Event [{self.name}]"
+
 
 @pytest.fixture
 def EventFilterSet():

@@ -44,10 +44,18 @@ def EventFilterSet():
                 "name": ["exact", "contains"],
             }
 
-        # Those are actually usable with our Query fixture bellow
+        # Those are actually usable with our Query fixture below
         tags__contains = ArrayFilter(field_name="tags", lookup_expr="contains")
         tags__overlap = ArrayFilter(field_name="tags", lookup_expr="overlap")
         tags = ArrayFilter(field_name="tags", lookup_expr="exact")
+        tags__len = ArrayFilter(
+            field_name="tags", lookup_expr="len", input_type=graphene.Int
+        )
+        tags__len__in = ArrayFilter(
+            field_name="tags",
+            method="tags__len__in_filter",
+            input_type=graphene.List(graphene.Int),
+        )
 
         # Those are actually not usable and only to check type declarations
         tags_ids__contains = ArrayFilter(field_name="tag_ids", lookup_expr="contains")

@@ -61,6 +69,14 @@ def EventFilterSet():
         )
         random_field = ArrayFilter(field_name="random_field", lookup_expr="exact")
 
+        def tags__len__in_filter(self, queryset, _name, value):
+            if not value:
+                return queryset.none()
+            return reduce(
+                lambda q1, q2: q1.union(q2),
+                [queryset.filter(tags__len=v) for v in value],
+            ).distinct()
+
     return EventFilterSet
 
 

@@ -83,68 +99,94 @@ def Query(EventType):
     we are running unit tests in sqlite which does not have ArrayFields.
     """
 
+    events = [
+        Event(name="Live Show", tags=["concert", "music", "rock"]),
+        Event(name="Musical", tags=["movie", "music"]),
+        Event(name="Ballet", tags=["concert", "dance"]),
+        Event(name="Speech", tags=[]),
+    ]
+
     class Query(graphene.ObjectType):
         events = DjangoFilterConnectionField(EventType)
 
         def resolve_events(self, info, **kwargs):
-            events = [
-                Event(name="Live Show", tags=["concert", "music", "rock"]),
-                Event(name="Musical", tags=["movie", "music"]),
-                Event(name="Ballet", tags=["concert", "dance"]),
-                Event(name="Speech", tags=[]),
-            ]
+            class FakeQuerySet(QuerySet):
+                def __init__(self, model=None):
+                    self.model = Event
+                    self.__store = list(events)
 
-            STORE["events"] = events
+                def all(self):
+                    return self
 
-            m_queryset = MagicMock(spec=QuerySet)
-            m_queryset.model = Event
-
-            def filter_events(**kwargs):
-                if "tags__contains" in kwargs:
-                    STORE["events"] = list(
-                        filter(
-                            lambda e: set(kwargs["tags__contains"]).issubset(
-                                set(e.tags)
-                            ),
-                            STORE["events"],
-                        )
-                    )
-                if "tags__overlap" in kwargs:
-                    STORE["events"] = list(
-                        filter(
-                            lambda e: not set(kwargs["tags__overlap"]).isdisjoint(
-                                set(e.tags)
-                            ),
-                            STORE["events"],
-                        )
-                    )
-                if "tags__exact" in kwargs:
-                    STORE["events"] = list(
-                        filter(
-                            lambda e: set(kwargs["tags__exact"]) == set(e.tags),
-                            STORE["events"],
-                        )
-                    )
+                def filter(self, **kwargs):
+                    queryset = FakeQuerySet()
+                    queryset.__store = list(self.__store)
+                    if "tags__contains" in kwargs:
+                        queryset.__store = list(
+                            filter(
+                                lambda e: set(kwargs["tags__contains"]).issubset(
+                                    set(e.tags)
+                                ),
+                                queryset.__store,
+                            )
+                        )
+                    if "tags__overlap" in kwargs:
+                        queryset.__store = list(
+                            filter(
+                                lambda e: not set(kwargs["tags__overlap"]).isdisjoint(
+                                    set(e.tags)
+                                ),
+                                queryset.__store,
+                            )
+                        )
+                    if "tags__exact" in kwargs:
+                        queryset.__store = list(
+                            filter(
+                                lambda e: set(kwargs["tags__exact"]) == set(e.tags),
+                                queryset.__store,
+                            )
+                        )
+                    if "tags__len" in kwargs:
+                        queryset.__store = list(
+                            filter(
+                                lambda e: len(e.tags) == kwargs["tags__len"],
+                                queryset.__store,
+                            )
+                        )
+                    return queryset
 
-            def mock_queryset_filter(*args, **kwargs):
-                filter_events(**kwargs)
-                return m_queryset
+                def union(self, *args):
+                    queryset = FakeQuerySet()
+                    queryset.__store = self.__store
+                    for arg in args:
+                        queryset.__store += arg.__store
+                    return queryset
 
-            def mock_queryset_none(*args, **kwargs):
-                STORE["events"] = []
-                return m_queryset
+                def none(self):
+                    queryset = FakeQuerySet()
+                    queryset.__store = []
+                    return queryset
 
-            def mock_queryset_count(*args, **kwargs):
-                return len(STORE["events"])
+                def count(self):
+                    return len(self.__store)
 
-            m_queryset.all.return_value = m_queryset
-            m_queryset.filter.side_effect = mock_queryset_filter
-            m_queryset.none.side_effect = mock_queryset_none
-            m_queryset.count.side_effect = mock_queryset_count
-            m_queryset.__getitem__.side_effect = lambda index: STORE[
-                "events"
-            ].__getitem__(index)
+                def distinct(self):
+                    queryset = FakeQuerySet()
+                    queryset.__store = []
+                    for event in self.__store:
+                        if event not in queryset.__store:
+                            queryset.__store.append(event)
+                    queryset.__store = sorted(queryset.__store, key=lambda e: e.name)
+                    return queryset
 
-            return m_queryset
+                def __getitem__(self, index):
+                    return self.__store[index]
+
+            return FakeQuerySet()
 
     return Query
+
+
+@pytest.fixture
+def schema(Query):
+    return graphene.Schema(query=Query)

@@ -1,18 +1,14 @@
 import pytest
 
-from graphene import Schema
-
 from ...compat import ArrayField, MissingType
 
 
 @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
-def test_array_field_contains_multiple(Query):
+def test_array_field_contains_multiple(schema):
     """
     Test contains filter on a array field of string.
     """
 
-    schema = Schema(query=Query)
-
     query = """
     query {
         events (tags_Contains: ["concert", "music"]) {

@@ -32,13 +28,11 @@ def test_array_field_contains_multiple(Query):
 
 
 @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
-def test_array_field_contains_one(Query):
+def test_array_field_contains_one(schema):
     """
     Test contains filter on a array field of string.
     """
 
-    schema = Schema(query=Query)
-
     query = """
     query {
         events (tags_Contains: ["music"]) {

@@ -59,13 +53,11 @@ def test_array_field_contains_one(Query):
 
 
 @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
-def test_array_field_contains_empty_list(Query):
+def test_array_field_contains_empty_list(schema):
     """
     Test contains filter on a array field of string.
     """
 
-    schema = Schema(query=Query)
-
     query = """
     query {
         events (tags_Contains: []) {

graphene_django/filter/tests/test_array_field_custom_filter.py (new file, 186 lines)

import pytest

from ...compat import ArrayField, MissingType


@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_len_filter(schema):
    query = """
    query {
        events (tags_Len: 2) {
            edges {
                node {
                    name
                }
            }
        }
    }
    """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["events"]["edges"] == [
        {"node": {"name": "Musical"}},
        {"node": {"name": "Ballet"}},
    ]

    query = """
    query {
        events (tags_Len: 0) {
            edges {
                node {
                    name
                }
            }
        }
    }
    """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["events"]["edges"] == [
        {"node": {"name": "Speech"}},
    ]

    query = """
    query {
        events (tags_Len: 10) {
            edges {
                node {
                    name
                }
            }
        }
    }
    """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["events"]["edges"] == []

    query = """
    query {
        events (tags_Len: "2") {
            edges {
                node {
                    name
                }
            }
        }
    }
    """
    result = schema.execute(query)
    assert len(result.errors) == 1
    assert result.errors[0].message == 'Int cannot represent non-integer value: "2"'

    query = """
    query {
        events (tags_Len: True) {
            edges {
                node {
                    name
                }
            }
        }
    }
    """
    result = schema.execute(query)
    assert len(result.errors) == 1
    assert result.errors[0].message == "Int cannot represent non-integer value: True"


@pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
def test_array_field_custom_filter(schema):
    query = """
    query {
        events (tags_Len_In: 2) {
            edges {
                node {
                    name
                }
            }
        }
    }
    """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["events"]["edges"] == [
        {"node": {"name": "Ballet"}},
        {"node": {"name": "Musical"}},
    ]

    query = """
    query {
        events (tags_Len_In: [0, 2]) {
            edges {
                node {
                    name
                }
            }
        }
    }
    """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["events"]["edges"] == [
        {"node": {"name": "Ballet"}},
        {"node": {"name": "Musical"}},
        {"node": {"name": "Speech"}},
    ]

    query = """
    query {
        events (tags_Len_In: [10]) {
            edges {
                node {
                    name
                }
            }
        }
    }
    """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["events"]["edges"] == []

    query = """
    query {
        events (tags_Len_In: []) {
            edges {
                node {
                    name
                }
            }
        }
    }
    """
    result = schema.execute(query)
    assert not result.errors
    assert result.data["events"]["edges"] == []

    query = """
    query {
        events (tags_Len_In: "12") {
            edges {
                node {
                    name
                }
            }
        }
    }
    """
    result = schema.execute(query)
    assert len(result.errors) == 1
    assert result.errors[0].message == 'Int cannot represent non-integer value: "12"'

    query = """
    query {
        events (tags_Len_In: True) {
            edges {
                node {
                    name
                }
            }
        }
    }
    """
    result = schema.execute(query)
    assert len(result.errors) == 1
    assert result.errors[0].message == "Int cannot represent non-integer value: True"

@@ -1,18 +1,14 @@
 import pytest
 
-from graphene import Schema
-
 from ...compat import ArrayField, MissingType
 
 
 @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
-def test_array_field_exact_no_match(Query):
+def test_array_field_exact_no_match(schema):
    """
    Test exact filter on a array field of string.
    """
 
-    schema = Schema(query=Query)
-
     query = """
     query {
         events (tags: ["concert", "music"]) {

@@ -30,13 +26,11 @@ def test_array_field_exact_no_match(Query):
 
 
 @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
-def test_array_field_exact_match(Query):
+def test_array_field_exact_match(schema):
     """
     Test exact filter on a array field of string.
     """
 
-    schema = Schema(query=Query)
-
     query = """
     query {
         events (tags: ["movie", "music"]) {

@@ -56,13 +50,11 @@ def test_array_field_exact_match(Query):
 
 
 @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
-def test_array_field_exact_empty_list(Query):
+def test_array_field_exact_empty_list(schema):
     """
     Test exact filter on a array field of string.
     """
 
-    schema = Schema(query=Query)
-
     query = """
     query {
         events (tags: []) {

@@ -82,11 +74,10 @@ def test_array_field_exact_empty_list(Query):
 
 
 @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
-def test_array_field_filter_schema_type(Query):
+def test_array_field_filter_schema_type(schema):
     """
     Check that the type in the filter is an array field like on the object type.
     """
-    schema = Schema(query=Query)
     schema_str = str(schema)
 
     assert (

@@ -112,6 +103,8 @@ def test_array_field_filter_schema_type(Query):
         "tags_Contains": "[String!]",
         "tags_Overlap": "[String!]",
         "tags": "[String!]",
+        "tags_Len": "Int",
+        "tags_Len_In": "[Int]",
         "tagsIds_Contains": "[Int!]",
         "tagsIds_Overlap": "[Int!]",
         "tagsIds": "[Int!]",

@@ -1,18 +1,14 @@
 import pytest
 
-from graphene import Schema
-
 from ...compat import ArrayField, MissingType
 
 
 @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
-def test_array_field_overlap_multiple(Query):
+def test_array_field_overlap_multiple(schema):
     """
     Test overlap filter on a array field of string.
     """
 
-    schema = Schema(query=Query)
-
     query = """
     query {
         events (tags_Overlap: ["concert", "music"]) {

@@ -34,13 +30,11 @@ def test_array_field_overlap_multiple(Query):
 
 
 @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
-def test_array_field_overlap_one(Query):
+def test_array_field_overlap_one(schema):
     """
     Test overlap filter on a array field of string.
     """
 
-    schema = Schema(query=Query)
-
     query = """
     query {
         events (tags_Overlap: ["music"]) {

@@ -61,13 +55,11 @@ def test_array_field_overlap_one(Query):
 
 
 @pytest.mark.skipif(ArrayField is MissingType, reason="ArrayField should exist")
-def test_array_field_overlap_empty_list(Query):
+def test_array_field_overlap_empty_list(schema):
     """
     Test overlap filter on a array field of string.
     """
 
-    schema = Schema(query=Query)
-
     query = """
     query {
         events (tags_Overlap: []) {

@@ -789,7 +789,7 @@ def test_order_by():
 
     query = """
         query NodeFilteringQuery {
-            allReporters(orderBy: "-firtsnaMe") {
+            allReporters(orderBy: "-firstname") {
                 edges {
                     node {
                         firstName

@@ -802,7 +802,7 @@ def test_order_by():
     assert result.errors
 
 
-def test_order_by_is_perserved():
+def test_order_by_is_preserved():
     class ReporterType(DjangoObjectType):
         class Meta:
             model = Reporter

@@ -1,4 +1,8 @@
+import operator
+from functools import reduce
+
 import pytest
+from django.db.models import Q
 from django_filters import FilterSet
 
 import graphene

@@ -44,6 +48,10 @@ def schema():
             only_first = TypedFilter(
                 input_type=graphene.Boolean, method="only_first_filter"
             )
+            headline_search = ListFilter(
+                method="headline_search_filter",
+                input_type=graphene.List(graphene.String),
+            )
 
             def first_n_filter(self, queryset, _name, value):
                 return queryset[:value]

@@ -54,6 +62,13 @@ def schema():
                 else:
                     return queryset
 
+            def headline_search_filter(self, queryset, _name, value):
+                if not value:
+                    return queryset.none()
+                return queryset.filter(
+                    reduce(operator.or_, [Q(headline__icontains=v) for v in value])
+                )
+
         class ArticleType(DjangoObjectType):
             class Meta:
                 model = Article

@@ -87,6 +102,7 @@ def test_typed_filter_schema(schema):
         "lang_InStr": "[String]",
         "firstN": "Int",
         "onlyFirst": "Boolean",
+        "headlineSearch": "[String]",
     }
 
     all_articles_filters = (

@@ -104,24 +120,7 @@ def test_typed_filters_work(schema):
     Article.objects.create(headline="A", reporter=reporter, editor=reporter, lang="es")
     Article.objects.create(headline="B", reporter=reporter, editor=reporter, lang="es")
     Article.objects.create(headline="C", reporter=reporter, editor=reporter, lang="en")
-
-    query = "query { articles (lang_In: [ES]) { edges { node { headline } } } }"
-
-    result = schema.execute(query)
-    assert not result.errors
-    assert result.data["articles"]["edges"] == [
-        {"node": {"headline": "A"}},
-        {"node": {"headline": "B"}},
-    ]
-
-    query = 'query { articles (lang_InStr: ["es"]) { edges { node { headline } } } }'
-
-    result = schema.execute(query)
-    assert not result.errors
-    assert result.data["articles"]["edges"] == [
-        {"node": {"headline": "A"}},
-        {"node": {"headline": "B"}},
-    ]
+    Article.objects.create(headline="AB", reporter=reporter, editor=reporter, lang="es")
 
     query = 'query { articles (lang_Contains: "n") { edges { node { headline } } } }'
 

@@ -137,7 +136,7 @@ def test_typed_filters_work(schema):
     assert not result.errors
     assert result.data["articles"]["edges"] == [
         {"node": {"headline": "A"}},
-        {"node": {"headline": "B"}},
+        {"node": {"headline": "AB"}},
     ]
 
     query = "query { articles (onlyFirst: true) { edges { node { headline } } } }"

@@ -147,3 +146,86 @@ def test_typed_filters_work(schema):
     assert result.data["articles"]["edges"] == [
         {"node": {"headline": "A"}},
     ]
+
+
+def test_list_filters_work(schema):
+    reporter = Reporter.objects.create(first_name="John", last_name="Doe", email="")
+    Article.objects.create(headline="A", reporter=reporter, editor=reporter, lang="es")
+    Article.objects.create(headline="B", reporter=reporter, editor=reporter, lang="es")
+    Article.objects.create(headline="C", reporter=reporter, editor=reporter, lang="en")
+    Article.objects.create(headline="AB", reporter=reporter, editor=reporter, lang="es")
+
+    query = "query { articles (lang_In: [ES]) { edges { node { headline } } } }"
+
+    result = schema.execute(query)
+    assert not result.errors
+    assert result.data["articles"]["edges"] == [
+        {"node": {"headline": "A"}},
+        {"node": {"headline": "AB"}},
+        {"node": {"headline": "B"}},
+    ]
+
+    query = 'query { articles (lang_InStr: ["es"]) { edges { node { headline } } } }'
+
+    result = schema.execute(query)
+    assert not result.errors
+    assert result.data["articles"]["edges"] == [
+        {"node": {"headline": "A"}},
+        {"node": {"headline": "AB"}},
+        {"node": {"headline": "B"}},
+    ]
+
+    query = "query { articles (lang_InStr: []) { edges { node { headline } } } }"
+
+    result = schema.execute(query)
+    assert not result.errors
+    assert result.data["articles"]["edges"] == []
+
+    query = "query { articles (lang_InStr: null) { edges { node { headline } } } }"
+
+    result = schema.execute(query)
+    assert not result.errors
+    assert result.data["articles"]["edges"] == [
+        {"node": {"headline": "A"}},
+        {"node": {"headline": "AB"}},
+        {"node": {"headline": "B"}},
+        {"node": {"headline": "C"}},
+    ]
+
+    query = 'query { articles (headlineSearch: ["a", "B"]) { edges { node { headline } } } }'
+
+    result = schema.execute(query)
+    assert not result.errors
+    assert result.data["articles"]["edges"] == [
+        {"node": {"headline": "A"}},
+        {"node": {"headline": "AB"}},
+        {"node": {"headline": "B"}},
+    ]
+
+    query = "query { articles (headlineSearch: []) { edges { node { headline } } } }"
+
+    result = schema.execute(query)
+    assert not result.errors
+    assert result.data["articles"]["edges"] == []
+
+    query = "query { articles (headlineSearch: null) { edges { node { headline } } } }"
+
+    result = schema.execute(query)
+    assert not result.errors
+    assert result.data["articles"]["edges"] == [
+        {"node": {"headline": "A"}},
+        {"node": {"headline": "AB"}},
+        {"node": {"headline": "B"}},
+        {"node": {"headline": "C"}},
+    ]
+
+    query = 'query { articles (headlineSearch: [""]) { edges { node { headline } } } }'
+
+    result = schema.execute(query)
+    assert not result.errors
+    assert result.data["articles"]["edges"] == [
+        {"node": {"headline": "A"}},
+        {"node": {"headline": "AB"}},
+        {"node": {"headline": "B"}},
+        {"node": {"headline": "C"}},
+    ]

@@ -43,7 +43,7 @@ def get_filtering_args_from_filterset(filterset_class, type):
             isinstance(filter_field, TypedFilter)
             and filter_field.input_type is not None
         ):
-            # First check if the filter input type has been explicitely given
+            # First check if the filter input type has been explicitly given
             field_type = filter_field.input_type
         else:
             if name not in filterset_class.declared_filters or isinstance(

@@ -145,7 +145,7 @@ def replace_csv_filters(filterset_class):
                 label=filter_field.label,
                 method=filter_field.method,
                 exclude=filter_field.exclude,
-                **filter_field.extra
+                **filter_field.extra,
             )
         elif filter_type == "range":
             filterset_class.base_filters[name] = RangeFilter(

@@ -154,5 +154,5 @@ def replace_csv_filters(filterset_class):
                 label=filter_field.label,
                 method=filter_field.method,
                 exclude=filter_field.exclude,
-                **filter_field.extra
+                **filter_field.extra,
             )

@@ -23,8 +23,7 @@ def fields_for_form(form, only_fields, exclude_fields):
     for name, field in form.fields.items():
         is_not_in_only = only_fields and name not in only_fields
         is_excluded = (
-            name
-            in exclude_fields  # or
+            name in exclude_fields  # or
             # name in already_created_fields
         )
 

@@ -4,7 +4,7 @@ from graphene.types.inputobjecttype import InputObjectType
 from graphene.utils.str_converters import to_camel_case
 
 from ..converter import BlankValueField
-from ..types import ErrorType  # noqa Import ErrorType for backwards compatability
+from ..types import ErrorType  # noqa Import ErrorType for backwards compatibility
 from .mutation import fields_for_form
 
 

@@ -60,7 +60,7 @@ class DjangoFormInputObjectType(InputObjectType):
             and isinstance(object_type._meta.fields[name], BlankValueField)
         ):
             # Field type BlankValueField here means that field
-            # with choises have been converted to enum
+            # with choices have been converted to enum
             # (BlankValueField is using only for that task ?)
             setattr(cls, name, cls.get_enum_cnv_cls_instance(name, object_type))
         elif (

@@ -19,6 +19,7 @@ class SerializerMutationOptions(MutationOptions):
     model_class = None
     model_operations = ["create", "update"]
     serializer_class = None
+    optional_fields = ()
 
 
 def fields_for_serializer(

@@ -28,6 +29,7 @@ def fields_for_serializer(
     is_input=False,
     convert_choices_to_enum=True,
     lookup_field=None,
+    optional_fields=(),
 ):
     fields = OrderedDict()
     for name, field in serializer.fields.items():

@@ -48,9 +50,13 @@ def fields_for_serializer(
 
         if is_not_in_only or is_excluded:
             continue
+        is_optional = name in optional_fields or "__all__" in optional_fields
 
         fields[name] = convert_serializer_field(
-            field, is_input=is_input, convert_choices_to_enum=convert_choices_to_enum
+            field,
+            is_input=is_input,
+            convert_choices_to_enum=convert_choices_to_enum,
+            force_optional=is_optional,
         )
     return fields
 

@@ -74,7 +80,8 @@ class SerializerMutation(ClientIDMutation):
         exclude_fields=(),
         convert_choices_to_enum=True,
         _meta=None,
-        **options
+        optional_fields=(),
+        **options,
     ):
         if not serializer_class:
             raise Exception("serializer_class is required for the SerializerMutation")

@@ -98,6 +105,7 @@ class SerializerMutation(ClientIDMutation):
             is_input=True,
             convert_choices_to_enum=convert_choices_to_enum,
             lookup_field=lookup_field,
+            optional_fields=optional_fields,
         )
         output_fields = fields_for_serializer(
             serializer,

@@ -18,7 +18,9 @@ def get_graphene_type_from_serializer_field(field):
     )
 
 
-def convert_serializer_field(field, is_input=True, convert_choices_to_enum=True):
+def convert_serializer_field(
+    field, is_input=True, convert_choices_to_enum=True, force_optional=False
+):
     """
     Converts a django rest frameworks field to a graphql field
     and marks the field as required if we are creating an input type

@@ -31,7 +33,10 @@ def convert_serializer_field(field, is_input=True, convert_choices_to_enum=True)
     graphql_type = get_graphene_type_from_serializer_field(field)
 
     args = []
-    kwargs = {"description": field.help_text, "required": is_input and field.required}
+    kwargs = {
+        "description": field.help_text,
+        "required": is_input and field.required and not force_optional,
+    }
 
     # if it is a tuple or a list it means that we are returning
     # the graphql type and the child type

@@ -3,7 +3,7 @@ import datetime
 from pytest import raises
 from rest_framework import serializers
 
-from graphene import Field, ResolveInfo
+from graphene import Field, ResolveInfo, String
 from graphene.types.inputobjecttype import InputObjectType
 
 from ...types import DjangoObjectType

@@ -105,6 +105,16 @@ def test_exclude_fields():
     assert "created" not in MyMutation.Input._meta.fields
 
 
+def test_model_serializer_optional_fields():
+    class MyMutation(SerializerMutation):
+        class Meta:
+            serializer_class = MyModelSerializer
+            optional_fields = ("cool_name",)
+
+    assert "cool_name" in MyMutation.Input._meta.fields
+    assert MyMutation.Input._meta.fields["cool_name"].type == String
+
+
 def test_write_only_field():
     class WriteOnlyFieldModelSerializer(serializers.ModelSerializer):
         password = serializers.CharField(write_only=True)

@@ -265,7 +275,7 @@ def test_perform_mutate_success():
     result = MyMethodMutation.mutate_and_get_payload(
         None,
         mock_info(),
-        **{"cool_name": "Narf", "last_edited": datetime.date(2020, 1, 4)}
+        **{"cool_name": "Narf", "last_edited": datetime.date(2020, 1, 4)},
     )
 
     assert result.errors is None

@@ -42,6 +42,7 @@ DEFAULTS = {
     # https://github.com/graphql/graphiql/tree/main/packages/graphiql#options
     "GRAPHIQL_HEADER_EDITOR_ENABLED": True,
     "GRAPHIQL_SHOULD_PERSIST_HEADERS": False,
+    "GRAPHIQL_INPUT_VALUE_DEPRECATION": False,
     "ATOMIC_MUTATIONS": False,
     "TESTING_ENDPOINT": "/graphql",
 }

@@ -122,6 +122,7 @@
       onEditOperationName: onEditOperationName,
       isHeadersEditorEnabled: GRAPHENE_SETTINGS.graphiqlHeaderEditorEnabled,
       shouldPersistHeaders: GRAPHENE_SETTINGS.graphiqlShouldPersistHeaders,
+      inputValueDeprecation: GRAPHENE_SETTINGS.graphiqlInputValueDeprecation,
       query: query,
     };
     if (parameters.variables) {

@@ -54,6 +54,7 @@ add "&raw" to the end of the URL within a browser.
       {% endif %}
       graphiqlHeaderEditorEnabled: {{ graphiql_header_editor_enabled|yesno:"true,false" }},
       graphiqlShouldPersistHeaders: {{ graphiql_should_persist_headers|yesno:"true,false" }},
+      graphiqlInputValueDeprecation: {{ graphiql_input_value_deprecation|yesno:"true,false" }},
     };
   </script>
   <script src="{% static 'graphene_django/graphiql.js' %}"></script>

@@ -97,7 +97,7 @@ class CNNReporter(Reporter):
 
 class APNewsReporter(Reporter):
     """
-    This class only inherits from Reporter for testing multi table inheritence
+    This class only inherits from Reporter for testing multi table inheritance
     similar to what you'd see in django-polymorphic
     """
 

@@ -102,10 +102,8 @@ def validate_fields(type_, model, fields, only_fields, exclude_fields):
             if name in all_field_names:
                 # Field is a custom field
                 warnings.warn(
-                    (
-                        'Excluding the custom field "{field_name}" on DjangoObjectType "{type_}" has no effect. '
-                        'Either remove the custom field or remove the field from the "exclude" list.'
-                    ).format(field_name=name, type_=type_)
+                    f'Excluding the custom field "{name}" on DjangoObjectType "{type_}" has no effect. '
+                    'Either remove the custom field or remove the field from the "exclude" list.'
                 )
             else:
                 if not hasattr(model, name):

@@ -4,6 +4,7 @@ import warnings
 from django.test import Client, TestCase, TransactionTestCase
 
 from graphene_django.settings import graphene_settings
+from graphene_django.utils.utils import _DJANGO_VERSION_AT_LEAST_4_2
 
 DEFAULT_GRAPHQL_URL = "/graphql"
 

@@ -55,8 +56,14 @@ def graphql_query(
         else:
             body["variables"] = {"input": input_data}
     if headers:
+        header_params = (
+            {"headers": headers} if _DJANGO_VERSION_AT_LEAST_4_2 else headers
+        )
         resp = client.post(
-            graphql_url, json.dumps(body), content_type="application/json", **headers
+            graphql_url,
+            json.dumps(body),
+            content_type="application/json",
+            **header_params,
         )
     else:
         resp = client.post(

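With the change above, ``graphql_query`` forwards ``headers`` through the test client's ``headers=`` parameter on Django 4.2+ and as raw keyword arguments on older versions. An illustrative test built on it; the query, header name, and assertions are examples, not part of the diff, and header keys follow the test client conventions of the installed Django version:

.. code:: python

    # Assumes a GraphQL endpoint is configured at the project's testing endpoint
    # and that pytest-django provides the "client" fixture.
    import json

    from graphene_django.utils.testing import graphql_query


    def test_query_with_custom_header(client):
        response = graphql_query(
            "query { __typename }",
            headers={"Authorization": "Bearer some-token"},  # illustrative header
            client=client,
        )
        content = json.loads(response.content)
        assert "errors" not in content
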
@@ -1,5 +1,6 @@
 import inspect
 
+import django
 from django.db import connection, models, transaction
 from django.db.models.manager import Manager
 from django.utils.encoding import force_str

@@ -145,3 +146,8 @@ def bypass_get_queryset(resolver):
     """
     resolver._bypass_get_queryset = True
     return resolver
+
+
+_DJANGO_VERSION_AT_LEAST_4_2 = django.VERSION[0] > 4 or (
+    django.VERSION[0] >= 4 and django.VERSION[1] >= 2
+)

@@ -9,10 +9,17 @@ from django.shortcuts import render
 from django.utils.decorators import method_decorator
 from django.views.decorators.csrf import ensure_csrf_cookie
 from django.views.generic import View
-from graphql import OperationType, get_operation_ast, parse
+from graphql import (
+    ExecutionResult,
+    OperationType,
+    execute,
+    get_operation_ast,
+    parse,
+    validate_schema,
+)
 from graphql.error import GraphQLError
-from graphql.execution import ExecutionResult
 from graphql.execution.middleware import MiddlewareManager
+from graphql.validation import validate
 
 from graphene import Schema
 from graphene_django.constants import MUTATION_ERRORS_FLAG

@@ -167,11 +174,13 @@ class GraphQLView(View):
             subscriptions_transport_ws_sri=self.subscriptions_transport_ws_sri,
             graphiql_plugin_explorer_version=self.graphiql_plugin_explorer_version,
             graphiql_plugin_explorer_sri=self.graphiql_plugin_explorer_sri,
+            graphiql_plugin_explorer_css_sri=self.graphiql_plugin_explorer_css_sri,
             # The SUBSCRIPTION_PATH setting.
             subscription_path=self.subscription_path,
             # GraphiQL headers tab,
             graphiql_header_editor_enabled=graphene_settings.GRAPHIQL_HEADER_EDITOR_ENABLED,
             graphiql_should_persist_headers=graphene_settings.GRAPHIQL_SHOULD_PERSIST_HEADERS,
+            graphiql_input_value_deprecation=graphene_settings.GRAPHIQL_INPUT_VALUE_DEPRECATION,
         )
 
         if self.batch:

@@ -293,43 +302,56 @@
                 return None
             raise HttpError(HttpResponseBadRequest("Must provide query string."))
 
+        schema = self.schema.graphql_schema
+
+        schema_validation_errors = validate_schema(schema)
+        if schema_validation_errors:
+            return ExecutionResult(data=None, errors=schema_validation_errors)
+
         try:
             document = parse(query)
         except Exception as e:
             return ExecutionResult(errors=[e])
 
-        if request.method.lower() == "get":
-            operation_ast = get_operation_ast(document, operation_name)
-            if operation_ast and operation_ast.operation != OperationType.QUERY:
-                if show_graphiql:
-                    return None
+        operation_ast = get_operation_ast(document, operation_name)
+
+        if (
+            request.method.lower() == "get"
+            and operation_ast is not None
+            and operation_ast.operation != OperationType.QUERY
+        ):
+            if show_graphiql:
+                return None
 
-                raise HttpError(
-                    HttpResponseNotAllowed(
-                        ["POST"],
-                        "Can only perform a {} operation from a POST request.".format(
-                            operation_ast.operation.value
-                        ),
-                    )
+            raise HttpError(
+                HttpResponseNotAllowed(
+                    ["POST"],
+                    "Can only perform a {} operation from a POST request.".format(
+                        operation_ast.operation.value
+                    ),
                 )
-        try:
-            extra_options = {}
-            if self.execution_context_class:
-                extra_options["execution_context_class"] = self.execution_context_class
+            )
 
-            options = {
-                "source": query,
+        validation_errors = validate(schema, document)
+
+        if validation_errors:
+            return ExecutionResult(data=None, errors=validation_errors)
+
+        try:
+            execute_options = {
                 "root_value": self.get_root_value(request),
+                "context_value": self.get_context(request),
                 "variable_values": variables,
                 "operation_name": operation_name,
-                "context_value": self.get_context(request),
                 "middleware": self.get_middleware(request),
             }
-            options.update(extra_options)
+            if self.execution_context_class:
+                execute_options[
+                    "execution_context_class"
+                ] = self.execution_context_class
 
-            operation_ast = get_operation_ast(document, operation_name)
             if (
-                operation_ast
+                operation_ast is not None
                 and operation_ast.operation == OperationType.MUTATION
                 and (
                     graphene_settings.ATOMIC_MUTATIONS is True

@@ -337,12 +359,12 @@
                 )
             ):
                 with transaction.atomic():
-                    result = self.schema.execute(**options)
+                    result = execute(schema, document, **execute_options)
                     if getattr(request, MUTATION_ERRORS_FLAG, False) is True:
                         transaction.set_rollback(True)
                     return result
 
-            return self.schema.execute(**options)
+            return execute(schema, document, **execute_options)
         except Exception as e:
             return ExecutionResult(errors=[e])
 

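The view now validates the schema and the document explicitly and calls graphql-core's ``execute`` directly instead of going through ``Schema.execute``. A condensed sketch of that flow; ``run_query`` is an illustrative name, and error handling plus the GET/mutation guard are omitted:

.. code:: python

    # Simplified sketch of the new execution order in GraphQLView.execute_graphql_request:
    # validate the schema once, parse the query, run document validation explicitly,
    # then call graphql-core's execute() with the prepared options.
    from graphql import execute, parse, validate, validate_schema


    def run_query(graphene_schema, query, **execute_options):
        schema = graphene_schema.graphql_schema

        schema_errors = validate_schema(schema)
        if schema_errors:
            return None, schema_errors

        document = parse(query)

        validation_errors = validate(schema, document)
        if validation_errors:
            return None, validation_errors

        result = execute(schema, document, **execute_options)
        return result, result.errors
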
setup.py (4 changes)

@@ -26,8 +26,7 @@ tests_require = [
 
 
 dev_requires = [
-    "black==23.7.0",
-    "ruff==0.0.283",
+    "ruff==0.1.2",
     "pre-commit",
 ] + tests_require
 

@@ -50,6 +49,7 @@ setup(
         "Programming Language :: Python :: 3.9",
         "Programming Language :: Python :: 3.10",
         "Programming Language :: Python :: 3.11",
+        "Programming Language :: Python :: 3.12",
         "Programming Language :: Python :: Implementation :: PyPy",
         "Framework :: Django",
         "Framework :: Django :: 3.2",

tox.ini (10 changes)

@@ -1,8 +1,9 @@
 [tox]
 envlist =
     py{38,39,310}-django32
-    py{38,39}-django{41,42}
-    py{310,311}-django{41,42,main}
+    py{38,39}-django42
+    py{310,311}-django{42,50,main}
+    py312-django{42,50,main}
     pre-commit
 
 [gh-actions]

@@ -11,12 +12,13 @@ python =
     3.9: py39
     3.10: py310
     3.11: py311
+    3.12: py312
 
 [gh-actions:env]
 DJANGO =
     3.2: django32
-    4.1: django41
     4.2: django42
+    5.0: django50
     main: djangomain
 
 [testenv]

@@ -29,8 +31,8 @@ deps =
     -e.[test]
     psycopg2-binary
     django32: Django>=3.2,<4.0
-    django41: Django>=4.1,<4.2
     django42: Django>=4.2,<4.3
+    django50: Django>=5.0,<5.1
     djangomain: https://github.com/django/django/archive/main.zip
 commands = {posargs:pytest --cov=graphene_django graphene_django examples}
