Merge pull request #1357 from codebyaryan/master
add support for query validation
commit efc03533ae
.github/workflows/tests.yml (new file, 83 lines)
@@ -0,0 +1,83 @@
name: 📄 Tests
on:
  push:
    branches:
      - master
      - '*.x'
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.rst'
  pull_request:
    branches:
      - master
      - '*.x'
    paths-ignore:
      - 'docs/**'
      - '*.md'
      - '*.rst'
jobs:
  tests:
    # runs the test suite
    name: ${{ matrix.name }}
    runs-on: ${{ matrix.os }}
    strategy:
      fail-fast: false
      matrix:
        include:
          - {name: '3.8', python: '3.8', os: ubuntu-latest, tox: py38}
          - {name: '3.7', python: '3.7', os: ubuntu-latest, tox: py37}
          - {name: '3.6', python: '3.6', os: ubuntu-latest, tox: py36}
    steps:
      - uses: actions/checkout@v2
      - uses: actions/setup-python@v2
        with:
          python-version: ${{ matrix.python }}

      - name: update pip
        run: |
          pip install -U wheel
          pip install -U setuptools
          python -m pip install -U pip

      - name: get pip cache dir
        id: pip-cache
        run: echo "::set-output name=dir::$(pip cache dir)"

      - name: cache pip dependencies
        uses: actions/cache@v2
        with:
          path: ${{ steps.pip-cache.outputs.dir }}
          key: pip|${{ runner.os }}|${{ matrix.python }}|${{ hashFiles('setup.py') }}

      - run: pip install tox
      - run: tox -e ${{ matrix.tox }}

  coveralls_finish:
    # check coverage increase/decrease
    needs: tests
    runs-on: ubuntu-latest
    steps:
      - name: Coveralls Finished
        uses: AndreMiras/coveralls-python-action@develop

  deploy:
    # builds and publishes to PyPi
    runs-on: ubuntu-latest
    steps:
      - uses: actions/checkout@v2
      - name: Set up Python
        uses: actions/setup-python@v2
        with:
          python-version: '3.7'
      - name: Install dependencies
        run: |
          python -m pip install --upgrade pip
          pip install build
      - name: Build package
        run: python -m build
      - name: Publish package
        uses: pypa/gh-action-pypi-publish@release/v1
        with:
          user: __token__
          password: ${{ secrets.PYPI_API_TOKEN }}
.travis.yml (deleted, 42 lines)
@@ -1,42 +0,0 @@
language: python
dist: xenial

python:
  - "3.6"
  - "3.7"
  - "3.8"

install:
  - pip install tox tox-travis
script: tox
after_success:
  - pip install coveralls
  - coveralls
cache:
  directories:
    - $HOME/.cache/pip
    - $HOME/.cache/pre-commit

stages:
  - test
  - name: deploy
    if: tag IS present

jobs:
  fast_finish: true
  include:
    - env: TOXENV=pre-commit
      python: 3.7
    - env: TOXENV=mypy
      python: 3.7
    - stage: deploy
      python: 3.7
      after_success: true
      deploy:
        provider: pypi
        user: syrusakbary
        on:
          tags: true
        password:
          secure: LHOp9DvYR+70vj4YVY8+JRNCKUOfYZREEUY3+4lMUpY7Zy5QwDfgEMXG64ybREH9dFldpUqVXRj53eeU3spfudSfh8NHkgqW7qihez2AhSnRc4dK6ooNfB+kLcSoJ4nUFGxdYImABc4V1hJvflGaUkTwDNYVxJF938bPaO797IvSbuI86llwqkvuK2Vegv9q/fy9sVGaF9VZIs4JgXwR5AyDR7FBArl+S84vWww4vTFD33hoE88VR4QvFY3/71BwRtQrnCMm7AOm31P9u29yi3bpzQpiOR2rHsgrsYdm597QzFKVxYwsmf9uAx2bpbSPy2WibunLePIvOFwm8xcfwnz4/J4ONBc5PSFmUytTWpzEnxb0bfUNLuYloIS24V6OZ8BfAhiYZ1AwySeJCQDM4Vk1V8IF6trTtyx5EW/uV9jsHCZ3LFsAD7UnFRTosIgN3SAK3ZWCEk5oF2IvjecsolEfkRXB3q9EjMkkuXRUeFDH2lWJLgNE27BzY6myvZVzPmfwZUsPBlPD/6w+WLSp97Rjgr9zS3T1d4ddqFM4ZYu04f2i7a/UUQqG+itzzuX5DWLPvzuNt37JB45mB9IsvxPyXZ6SkAcLl48NGyKok1f3vQnvphkfkl4lni29woKhaau8xlsuEDrcwOoeAsVcZXiItg+l+z2SlIwM0A06EvQ=
        distributions: "sdist bdist_wheel"
@@ -10,3 +10,4 @@ Execution
    dataloader
    fileuploading
    subscriptions
+   queryvalidation
docs/execution/queryvalidation.rst (new file, 120 lines)
@@ -0,0 +1,120 @@
Query Validation
================

GraphQL uses query validators to check whether a query AST is valid and can be executed. Every GraphQL server implements
standard query validators. For example, there is a validator that tests whether a queried field exists on the queried type
and makes the query fail with a "Cannot query field on type" error if it doesn't.

To help with common use cases, graphene provides a few validation rules out of the box.


Depth limit Validator
---------------------
The depth limit validator helps to prevent execution of malicious
queries. It takes the following arguments:

- ``max_depth`` is the maximum allowed depth for any operation in a GraphQL document.
- ``ignore`` stops recursive depth checking based on a field name. Either a string or regexp to match the name, or a function that returns a boolean.
- ``callback`` is called each time validation runs. It receives an object which is a map of the depths for each operation.

Usage
-----

Here is how you would implement depth-limiting on your schema.

.. code:: python

    from graphql import validate, parse
    from graphene import ObjectType, Schema, String
    from graphene.validation import depth_limit_validator


    class MyQuery(ObjectType):
        name = String(required=True)


    schema = Schema(query=MyQuery)

    # queries which have a depth more than 20
    # will not be executed.

    validation_errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse('THE QUERY'),
        rules=(
            depth_limit_validator(
                max_depth=20
            ),
        )
    )
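``ignore`` and ``callback`` are optional. The sketch below is an illustrative addition (the field names ``user1``, ``user2`` and ``user3`` are made up) reusing the ``schema`` defined above; entries in ``ignore`` may be plain strings, compiled regular expressions or callables, and ``callback`` receives the computed depth of every operation in the document.

.. code:: python

    import re

    from graphql import validate, parse
    from graphene.validation import depth_limit_validator


    def log_depths(query_depths):
        # query_depths maps each operation name to its measured depth
        print(query_depths)


    validation_errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse('THE QUERY'),
        rules=(
            depth_limit_validator(
                max_depth=20,
                # skip depth checking below these (hypothetical) fields:
                # a string, a regexp, or a callable all work
                ignore=["user1", re.compile("user2"), lambda name: name == "user3"],
                callback=log_depths,
            ),
        ),
    )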
Disable Introspection
---------------------
The disable introspection validation rule ensures that your schema cannot be introspected.
This is a useful security measure in production environments.

Usage
-----

Here is how you would disable introspection for your schema.

.. code:: python

    from graphql import validate, parse
    from graphene import ObjectType, Schema, String
    from graphene.validation import DisableIntrospection


    class MyQuery(ObjectType):
        name = String(required=True)


    schema = Schema(query=MyQuery)

    # introspection queries will not be executed.

    validation_errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse('THE QUERY'),
        rules=(
            DisableIntrospection,
        )
    )
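Because both built-in rules follow graphql-core's standard validation-rule interface, they can be applied together in a single ``validate`` call. A short sketch, reusing the ``schema`` defined above:

.. code:: python

    from graphql import validate, parse
    from graphene.validation import DisableIntrospection, depth_limit_validator


    validation_errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse('THE QUERY'),
        rules=(
            DisableIntrospection,
            depth_limit_validator(max_depth=20),
        ),
    )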
Implementing custom validators
------------------------------
All custom query validators should extend the `ValidationRule <https://github.com/graphql-python/graphql-core/blob/v3.0.5/src/graphql/validation/rules/__init__.py#L37>`_
base class, importable from the ``graphql.validation.rules`` module. Query validators are visitor classes. They are
instantiated at the time of query validation with one required argument (``context: ASTValidationContext``). In order to
perform validation, your validator class should define one or more of the ``enter_*`` and ``leave_*`` methods. For possible
enter/leave items, as well as details on function documentation, please see the contents of the visitor module. To make
validation fail, you should call the validator's ``report_error`` method with an instance of ``GraphQLError`` describing the
failure reason. Here is an example query validator that visits field definitions in a GraphQL query and fails query validation
if any of those fields are blacklisted:

.. code:: python

    from graphql import GraphQLError
    from graphql.language import FieldNode
    from graphql.validation import ValidationRule


    my_blacklist = (
        "disallowed_field",
    )


    def is_blacklisted_field(field_name: str):
        return field_name.lower() in my_blacklist


    class BlackListRule(ValidationRule):
        def enter_field(self, node: FieldNode, *_args):
            field_name = node.name.value
            if not is_blacklisted_field(field_name):
                return

            self.report_error(
                GraphQLError(
                    f"Cannot query '{field_name}': field is blacklisted.", node,
                )
            )
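A custom rule is then used exactly like the built-in ones. A minimal sketch, assuming the ``schema`` from the earlier examples and a query that selects the blacklisted field:

.. code:: python

    from graphql import validate, parse


    validation_errors = validate(
        schema=schema.graphql_schema,
        document_ast=parse('{ disallowed_field }'),
        rules=(BlackListRule,),
    )
    # validation_errors should now contain the "field is blacklisted" error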
@@ -393,108 +393,11 @@ class TypeMap(dict):
         return type_


-class UnforgivingExecutionContext(ExecutionContext):
-    """An execution context which doesn't swallow exceptions.
-
-    The only difference between this execution context and the one it inherits from is
-    that ``except Exception`` is commented out within ``resolve_field_value_or_error``.
-    By removing that exception handling, only ``GraphQLError``'s are caught.
-    """
-
-    def resolve_field_value_or_error(
-        self, field_def, field_nodes, resolve_fn, source, info
-    ):
-        """Resolve field to a value or an error.
-
-        Isolates the "ReturnOrAbrupt" behavior to not de-opt the resolve_field()
-        method. Returns the result of resolveFn or the abrupt-return Error object.
-
-        For internal use only.
-        """
-        try:
-            # Build a dictionary of arguments from the field.arguments AST, using the
-            # variables scope to fulfill any variable references.
-            args = get_argument_values(field_def, field_nodes[0], self.variable_values)
-
-            # Note that contrary to the JavaScript implementation, we pass the context
-            # value as part of the resolve info.
-            result = resolve_fn(source, info, **args)
-            if self.is_awaitable(result):
-                # noinspection PyShadowingNames
-                async def await_result():
-                    try:
-                        return await result
-                    except GraphQLError as error:
-                        return error
-                    # except Exception as error:
-                    #     return GraphQLError(str(error), original_error=error)
-
-                    # Yes, this is commented out code. It's been intentionally
-                    # _not_ removed to show what has changed from the original
-                    # implementation.
-
-                return await_result()
-            return result
-        except GraphQLError as error:
-            return error
-        # except Exception as error:
-        #     return GraphQLError(str(error), original_error=error)
-
-        # Yes, this is commented out code. It's been intentionally _not_
-        # removed to show what has changed from the original implementation.
-
-    def complete_value_catching_error(
-        self, return_type, field_nodes, info, path, result
-    ):
-        """Complete a value while catching an error.
-
-        This is a small wrapper around completeValue which detects and logs errors in
-        the execution context.
-        """
-        try:
-            if self.is_awaitable(result):
-
-                async def await_result():
-                    value = self.complete_value(
-                        return_type, field_nodes, info, path, await result
-                    )
-                    if self.is_awaitable(value):
-                        return await value
-                    return value
-
-                completed = await_result()
-            else:
-                completed = self.complete_value(
-                    return_type, field_nodes, info, path, result
-                )
-            if self.is_awaitable(completed):
-                # noinspection PyShadowingNames
-                async def await_completed():
-                    try:
-                        return await completed
-
-                    # CHANGE WAS MADE HERE
-                    # ``GraphQLError`` was swapped in for ``except Exception``
-                    except GraphQLError as error:
-                        self.handle_field_error(error, field_nodes, path, return_type)
-
-                return await_completed()
-            return completed
-
-        # CHANGE WAS MADE HERE
-        # ``GraphQLError`` was swapped in for ``except Exception``
-        except GraphQLError as error:
-            self.handle_field_error(error, field_nodes, path, return_type)
-            return None
-
-
 class Schema:
     """Schema Definition.

     A Graphene Schema can execute operations (query, mutation, subscription) against the defined
     types. For advanced purposes, the schema can be used to lookup type definitions and answer
     questions about the types through introspection.

     Args:
         query (Type[ObjectType]): Root query *ObjectType*. Describes entry point for fields to *read*
             data in your Schema.
@@ -541,7 +444,6 @@ class Schema:
         """
         This function let the developer select a type in a given schema
         by accessing its attrs.

         Example: using schema.Query for accessing the "Query" type in the Schema
         """
         _type = self.graphql_schema.get_type(type_name)
@@ -556,11 +458,9 @@ class Schema:

     def execute(self, *args, **kwargs):
         """Execute a GraphQL query on the schema.

         Use the `graphql_sync` function from `graphql-core` to provide the result
         for a query string. Most of the time this method will be called by one of the Graphene
         :ref:`Integrations` via a web request.

         Args:
             request_string (str or Document): GraphQL request (query, mutation or subscription)
                 as string or parsed AST form from `graphql-core`.
@@ -577,7 +477,6 @@ class Schema:
                 defined in `graphql-core`.
             execution_context_class (ExecutionContext, optional): The execution context class
                 to use when resolving queries and mutations.

         Returns:
             :obj:`ExecutionResult` containing any data and errors for the operation.
         """
@@ -586,7 +485,6 @@ class Schema:

     async def execute_async(self, *args, **kwargs):
         """Execute a GraphQL query on the schema asynchronously.

         Same as `execute`, but uses `graphql` instead of `graphql_sync`.
         """
         kwargs = normalize_execute_kwargs(kwargs)
@@ -7,7 +7,7 @@ from graphene.tests.utils import dedent
 from ..field import Field
 from ..objecttype import ObjectType
 from ..scalars import String
-from ..schema import Schema, UnforgivingExecutionContext
+from ..schema import Schema


 class MyOtherType(ObjectType):
@@ -69,115 +69,3 @@ def test_schema_requires_query_type():
     assert len(result.errors) == 1
     error = result.errors[0]
     assert error.message == "Query root type must be provided."
-
-
-class TestUnforgivingExecutionContext:
-    @fixture
-    def schema(self):
-        class ErrorFieldsMixin:
-            sanity_field = String()
-            expected_error_field = String()
-            unexpected_value_error_field = String()
-            unexpected_type_error_field = String()
-            unexpected_attribute_error_field = String()
-            unexpected_key_error_field = String()
-
-            @staticmethod
-            def resolve_sanity_field(obj, info):
-                return "not an error"
-
-            @staticmethod
-            def resolve_expected_error_field(obj, info):
-                raise GraphQLError("expected error")
-
-            @staticmethod
-            def resolve_unexpected_value_error_field(obj, info):
-                raise ValueError("unexpected error")
-
-            @staticmethod
-            def resolve_unexpected_type_error_field(obj, info):
-                raise TypeError("unexpected error")
-
-            @staticmethod
-            def resolve_unexpected_attribute_error_field(obj, info):
-                raise AttributeError("unexpected error")
-
-            @staticmethod
-            def resolve_unexpected_key_error_field(obj, info):
-                return {}["fails"]
-
-        class NestedObject(ErrorFieldsMixin, ObjectType):
-            pass
-
-        class MyQuery(ErrorFieldsMixin, ObjectType):
-            nested_object = Field(NestedObject)
-            nested_object_error = Field(NestedObject)
-
-            @staticmethod
-            def resolve_nested_object(obj, info):
-                return object()
-
-            @staticmethod
-            def resolve_nested_object_error(obj, info):
-                raise TypeError()
-
-        schema = Schema(query=MyQuery)
-        return schema
-
-    def test_sanity_check(self, schema):
-        # this should pass with no errors (sanity check)
-        result = schema.execute(
-            "query { sanityField }",
-            execution_context_class=UnforgivingExecutionContext,
-        )
-        assert not result.errors
-        assert result.data == {"sanityField": "not an error"}
-
-    def test_nested_sanity_check(self, schema):
-        # this should pass with no errors (sanity check)
-        result = schema.execute(
-            r"query { nestedObject { sanityField } }",
-            execution_context_class=UnforgivingExecutionContext,
-        )
-        assert not result.errors
-        assert result.data == {"nestedObject": {"sanityField": "not an error"}}
-
-    def test_graphql_error(self, schema):
-        result = schema.execute(
-            "query { expectedErrorField }",
-            execution_context_class=UnforgivingExecutionContext,
-        )
-        assert len(result.errors) == 1
-        assert result.errors[0].message == "expected error"
-        assert result.data == {"expectedErrorField": None}
-
-    def test_nested_graphql_error(self, schema):
-        result = schema.execute(
-            r"query { nestedObject { expectedErrorField } }",
-            execution_context_class=UnforgivingExecutionContext,
-        )
-        assert len(result.errors) == 1
-        assert result.errors[0].message == "expected error"
-        assert result.data == {"nestedObject": {"expectedErrorField": None}}
-
-    @mark.parametrize(
-        "field,exception",
-        [
-            ("unexpectedValueErrorField", ValueError),
-            ("unexpectedTypeErrorField", TypeError),
-            ("unexpectedAttributeErrorField", AttributeError),
-            ("unexpectedKeyErrorField", KeyError),
-            ("nestedObject { unexpectedValueErrorField }", ValueError),
-            ("nestedObject { unexpectedTypeErrorField }", TypeError),
-            ("nestedObject { unexpectedAttributeErrorField }", AttributeError),
-            ("nestedObject { unexpectedKeyErrorField }", KeyError),
-            ("nestedObjectError { __typename }", TypeError),
-        ],
-    )
-    def test_unexpected_error(self, field, exception, schema):
-        with raises(exception):
-            # no result, but the exception should be propagated
-            schema.execute(
-                f"query {{ {field} }}",
-                execution_context_class=UnforgivingExecutionContext,
-            )
graphene/utils/is_introspection_key.py (new file, 6 lines)
@@ -0,0 +1,6 @@
def is_introspection_key(key):
    # from: https://spec.graphql.org/June2018/#sec-Schema
    # > All types and directives defined within a schema must not have a name which
    # > begins with "__" (two underscores), as this is used exclusively
    # > by GraphQL’s introspection system.
    return str(key).startswith("__")
graphene/validation/__init__.py (new file, 8 lines)
@@ -0,0 +1,8 @@
from .depth_limit import depth_limit_validator
from .disable_introspection import DisableIntrospection


__all__ = [
    "DisableIntrospection",
    "depth_limit_validator"
]
graphene/validation/depth_limit.py (new file, 201 lines)
@@ -0,0 +1,201 @@
# This is a Python port of https://github.com/stems/graphql-depth-limit
# which is licensed under the terms of the MIT license, reproduced below.
#
# -----------
#
# MIT License
#
# Copyright (c) 2017 Stem
#
# Permission is hereby granted, free of charge, to any person obtaining a copy
# of this software and associated documentation files (the "Software"), to deal
# in the Software without restriction, including without limitation the rights
# to use, copy, modify, merge, publish, distribute, sublicense, and/or sell
# copies of the Software, and to permit persons to whom the Software is
# furnished to do so, subject to the following conditions:
#
# The above copyright notice and this permission notice shall be included in all
# copies or substantial portions of the Software.
#
# THE SOFTWARE IS PROVIDED "AS IS", WITHOUT WARRANTY OF ANY KIND, EXPRESS OR
# IMPLIED, INCLUDING BUT NOT LIMITED TO THE WARRANTIES OF MERCHANTABILITY,
# FITNESS FOR A PARTICULAR PURPOSE AND NONINFRINGEMENT. IN NO EVENT SHALL THE
# AUTHORS OR COPYRIGHT HOLDERS BE LIABLE FOR ANY CLAIM, DAMAGES OR OTHER
# LIABILITY, WHETHER IN AN ACTION OF CONTRACT, TORT OR OTHERWISE, ARISING FROM,
# OUT OF OR IN CONNECTION WITH THE SOFTWARE OR THE USE OR OTHER DEALINGS IN THE
# SOFTWARE.

try:
    from re import Pattern
except ImportError:
    # backwards compatibility for v3.6
    from typing import Pattern

from typing import Callable, Dict, List, Optional, Union

from graphql import GraphQLError
from graphql.validation import ValidationContext, ValidationRule
from graphql.language import (
    DefinitionNode,
    FieldNode,
    FragmentDefinitionNode,
    FragmentSpreadNode,
    InlineFragmentNode,
    Node,
    OperationDefinitionNode,
)

from ..utils.is_introspection_key import is_introspection_key


IgnoreType = Union[Callable[[str], bool], Pattern, str]


def depth_limit_validator(
    max_depth: int,
    ignore: Optional[List[IgnoreType]] = None,
    callback: Callable[[Dict[str, int]], None] = None,
):
    class DepthLimitValidator(ValidationRule):
        def __init__(self, validation_context: ValidationContext):
            document = validation_context.document
            definitions = document.definitions

            fragments = get_fragments(definitions)
            queries = get_queries_and_mutations(definitions)
            query_depths = {}

            for name in queries:
                query_depths[name] = determine_depth(
                    node=queries[name],
                    fragments=fragments,
                    depth_so_far=0,
                    max_depth=max_depth,
                    context=validation_context,
                    operation_name=name,
                    ignore=ignore,
                )

            if callable(callback):
                callback(query_depths)
            super().__init__(validation_context)

    return DepthLimitValidator


def get_fragments(
    definitions: List[DefinitionNode],
) -> Dict[str, FragmentDefinitionNode]:
    fragments = {}
    for definition in definitions:
        if isinstance(definition, FragmentDefinitionNode):
            fragments[definition.name.value] = definition

    return fragments


# This will actually get both queries and mutations.
# We can basically treat those the same
def get_queries_and_mutations(
    definitions: List[DefinitionNode],
) -> Dict[str, OperationDefinitionNode]:
    operations = {}

    for definition in definitions:
        if isinstance(definition, OperationDefinitionNode):
            operation = definition.name.value if definition.name else "anonymous"
            operations[operation] = definition

    return operations


def determine_depth(
    node: Node,
    fragments: Dict[str, FragmentDefinitionNode],
    depth_so_far: int,
    max_depth: int,
    context: ValidationContext,
    operation_name: str,
    ignore: Optional[List[IgnoreType]] = None,
) -> int:
    if depth_so_far > max_depth:
        context.report_error(
            GraphQLError(
                f"'{operation_name}' exceeds maximum operation depth of {max_depth}.",
                [node],
            )
        )
        return depth_so_far

    if isinstance(node, FieldNode):
        should_ignore = is_introspection_key(node.name.value) or is_ignored(
            node, ignore
        )

        if should_ignore or not node.selection_set:
            return 0

        return 1 + max(
            map(
                lambda selection: determine_depth(
                    node=selection,
                    fragments=fragments,
                    depth_so_far=depth_so_far + 1,
                    max_depth=max_depth,
                    context=context,
                    operation_name=operation_name,
                    ignore=ignore,
                ),
                node.selection_set.selections,
            )
        )
    elif isinstance(node, FragmentSpreadNode):
        return determine_depth(
            node=fragments[node.name.value],
            fragments=fragments,
            depth_so_far=depth_so_far,
            max_depth=max_depth,
            context=context,
            operation_name=operation_name,
            ignore=ignore,
        )
    elif isinstance(
        node, (InlineFragmentNode, FragmentDefinitionNode, OperationDefinitionNode)
    ):
        return max(
            map(
                lambda selection: determine_depth(
                    node=selection,
                    fragments=fragments,
                    depth_so_far=depth_so_far,
                    max_depth=max_depth,
                    context=context,
                    operation_name=operation_name,
                    ignore=ignore,
                ),
                node.selection_set.selections,
            )
        )
    else:
        raise Exception(f"Depth crawler cannot handle: {node.kind}.")  # pragma: no cover


def is_ignored(node: FieldNode, ignore: Optional[List[IgnoreType]] = None) -> bool:
    if ignore is None:
        return False

    for rule in ignore:
        field_name = node.name.value
        if isinstance(rule, str):
            if field_name == rule:
                return True
        elif isinstance(rule, Pattern):
            if rule.match(field_name):
                return True
        elif callable(rule):
            if rule(field_name):
                return True
        else:
            raise ValueError(f"Invalid ignore option: {rule}.")

    return False
graphene/validation/disable_introspection.py (new file, 17 lines)
@@ -0,0 +1,17 @@
from graphql import GraphQLError
from graphql.language import FieldNode
from graphql.validation import ValidationRule

from ..utils.is_introspection_key import is_introspection_key


class DisableIntrospection(ValidationRule):
    def enter_field(self, node: FieldNode, *_args):
        field_name = node.name.value
        if is_introspection_key(field_name):
            self.report_error(
                GraphQLError(
                    f"Cannot query '{field_name}': introspection is disabled.",
                    node,
                )
            )
graphene/validation/tests/__init__.py (new empty file)
graphene/validation/tests/test_depth_limit_validator.py (new file, 279 lines)
@@ -0,0 +1,279 @@
import re

from pytest import raises
from graphql import parse, get_introspection_query, validate

from ...types import Schema, ObjectType, Interface
from ...types import String, Int, List, Field
from ..depth_limit import depth_limit_validator


class PetType(Interface):
    name = String(required=True)

    class meta:
        name = "Pet"


class CatType(ObjectType):
    class meta:
        name = "Cat"
        interfaces = (PetType,)


class DogType(ObjectType):
    class meta:
        name = "Dog"
        interfaces = (PetType,)


class AddressType(ObjectType):
    street = String(required=True)
    number = Int(required=True)
    city = String(required=True)
    country = String(required=True)

    class Meta:
        name = "Address"


class HumanType(ObjectType):
    name = String(required=True)
    email = String(required=True)
    address = Field(AddressType, required=True)
    pets = List(PetType, required=True)

    class Meta:
        name = "Human"


class Query(ObjectType):
    user = Field(
        HumanType,
        required=True,
        name=String()
    )
    version = String(
        required=True
    )
    user1 = Field(
        HumanType,
        required=True
    )
    user2 = Field(
        HumanType,
        required=True
    )
    user3 = Field(
        HumanType,
        required=True
    )

    @staticmethod
    def resolve_user(root, info, name=None):
        pass


schema = Schema(query=Query)


def run_query(query: str, max_depth: int, ignore=None):
    document = parse(query)

    result = None

    def callback(query_depths):
        nonlocal result
        result = query_depths

    errors = validate(
        schema=schema.graphql_schema,
        document_ast=document,
        rules=(
            depth_limit_validator(
                max_depth=max_depth,
                ignore=ignore,
                callback=callback
            ),
        ),
    )

    return errors, result


def test_should_count_depth_without_fragment():
    query = """
    query read0 {
      version
    }
    query read1 {
      version
      user {
        name
      }
    }
    query read2 {
      matt: user(name: "matt") {
        email
      }
      andy: user(name: "andy") {
        email
        address {
          city
        }
      }
    }
    query read3 {
      matt: user(name: "matt") {
        email
      }
      andy: user(name: "andy") {
        email
        address {
          city
        }
        pets {
          name
          owner {
            name
          }
        }
      }
    }
    """

    expected = {"read0": 0, "read1": 1, "read2": 2, "read3": 3}

    errors, result = run_query(query, 10)
    assert not errors
    assert result == expected


def test_should_count_with_fragments():
    query = """
    query read0 {
      ... on Query {
        version
      }
    }
    query read1 {
      version
      user {
        ... on Human {
          name
        }
      }
    }
    fragment humanInfo on Human {
      email
    }
    fragment petInfo on Pet {
      name
      owner {
        name
      }
    }
    query read2 {
      matt: user(name: "matt") {
        ...humanInfo
      }
      andy: user(name: "andy") {
        ...humanInfo
        address {
          city
        }
      }
    }
    query read3 {
      matt: user(name: "matt") {
        ...humanInfo
      }
      andy: user(name: "andy") {
        ... on Human {
          email
        }
        address {
          city
        }
        pets {
          ...petInfo
        }
      }
    }
    """

    expected = {"read0": 0, "read1": 1, "read2": 2, "read3": 3}

    errors, result = run_query(query, 10)
    assert not errors
    assert result == expected


def test_should_ignore_the_introspection_query():
    errors, result = run_query(get_introspection_query(), 10)
    assert not errors
    assert result == {"IntrospectionQuery": 0}


def test_should_catch_very_deep_query():
    query = """{
    user {
      pets {
        owner {
          pets {
            owner {
              pets {
                name
              }
            }
          }
        }
      }
    }
    }
    """
    errors, result = run_query(query, 4)

    assert len(errors) == 1
    assert errors[0].message == "'anonymous' exceeds maximum operation depth of 4."


def test_should_ignore_field():
    query = """
    query read1 {
      user { address { city } }
    }
    query read2 {
      user1 { address { city } }
      user2 { address { city } }
      user3 { address { city } }
    }
    """

    errors, result = run_query(
        query,
        10,
        ignore=[
            "user1",
            re.compile("user2"),
            lambda field_name: field_name == "user3",
        ],
    )

    expected = {"read1": 2, "read2": 0}
    assert not errors
    assert result == expected


def test_should_raise_invalid_ignore():
    query = """
    query read1 {
      user { address { city } }
    }
    """
    with raises(ValueError, match="Invalid ignore option:"):
        run_query(
            query,
            10,
            ignore=[True],
        )
graphene/validation/tests/test_disable_introspection.py (new file, 43 lines)
@@ -0,0 +1,43 @@
from graphql import parse, validate

from ...types import Schema, ObjectType, String
from ..disable_introspection import DisableIntrospection


class Query(ObjectType):
    name = String(
        required=True
    )

    @staticmethod
    def resolve_name(root, info):
        return "Hello world!"


schema = Schema(query=Query)


def run_query(query: str):
    document = parse(query)

    errors = validate(
        schema=schema.graphql_schema,
        document_ast=document,
        rules=(
            DisableIntrospection,
        ),
    )

    return errors


def test_disallows_introspection_queries():
    errors = run_query("{ __schema { queryType { name } } }")

    assert len(errors) == 1
    assert errors[0].message == "Cannot query '__schema': introspection is disabled."


def test_allows_non_introspection_queries():
    errors = run_query("{ name }")
    assert len(errors) == 0