mirror of https://github.com/graphql-python/graphene-django.git
commit 25a5ceb2a8
parent 75946f97f5

    generating filters from meta specification
@@ -1,5 +1,7 @@
 """Filters to ElasticSearch"""
 from collections import OrderedDict
 
+import six
+
 from elasticsearch_dsl import Q
 from graphene import String
@@ -18,13 +20,15 @@ class StringFilterES(object):  # pylint: disable=R0902
         "term": lambda name, value: Q('term', **{name: value}),
     }
 
-    def __init__(self, name=None, attr=None):
+    def __init__(self, name=None, attr=None, lookup_expressions=None, default_expr=None):
         """
         :param name: Name of the field. This is the name that will be exported.
         :param attr: Path to the index attr that will be used as filter.
         """
         assert name or attr, "At least the field name or the field attr should be passed"
         self.field_name = name or attr.replace('.', '_')
+        self.default_expr = default_expr or self.default_expr
+        self.lookup_expressions = lookup_expressions
         self.argument = String().Argument()
         self.fields = self.generate_fields()
 
@@ -36,9 +40,16 @@ class StringFilterES(object):  # pylint: disable=R0902
         expressions combinations.
         """
         fields = OrderedDict()
-        for variant in self.variants:
-            variant_name = self.field_name if variant in ["default", self.default_expr] \
-                else "%s_%s" % (self.field_name, variant)
-            fields[variant_name] = self
+        if self.lookup_expressions:
+            for variant in self.lookup_expressions:
+                if variant in self.variants:
+                    variant_name = self.field_name if variant in ["default", self.default_expr] \
+                        else "%s_%s" % (self.field_name, variant)
+                    fields[variant_name] = self
+
+        else:
+            variant_name = self.field_name
+            fields[variant_name] = self
 
         return fields
@@ -50,7 +61,7 @@ class StringFilterES(object):  # pylint: disable=R0902
         """
         queries = []
 
-        for argument, value in arguments.iteritems():
+        for argument, value in six.iteritems(arguments):
             if argument in self.fields:
 
                 if argument == self.field_name:
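With these changes the exported argument names follow lookup_expressions: the default expression keeps the bare field name, every other variant gets an _<expr> suffix, and a filter declared without lookup_expressions still exposes a single argument. A standalone sketch of that naming rule, for illustration only (the 'contains' default and the ('term', 'contains') variant set are assumptions, since the diff shows only part of the class):

# Sketch of the naming rule in StringFilterES.generate_fields above.
# Assumed for illustration: default_expr='contains', variants=('term', 'contains').
def exported_names(field_name, lookup_expressions=None,
                   default_expr='contains', variants=('term', 'contains')):
    names = []
    if lookup_expressions:
        for variant in lookup_expressions:
            if variant in variants:
                # The default expression keeps the plain field name,
                # other variants are suffixed with the expression.
                names.append(field_name if variant in ('default', default_expr)
                             else '%s_%s' % (field_name, variant))
    else:
        names.append(field_name)
    return names


print(exported_names('headline', ['term', 'contains']))  # ['headline_term', 'headline']
print(exported_names('headline'))                        # ['headline']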
@@ -1,12 +1,20 @@
 """Fields"""
+import copy
 from collections import OrderedDict
 
 from elasticsearch_dsl import Q
+from django_elasticsearch_dsl import ObjectField, StringField, TextField
 from django.utils import six
+from django_filters.utils import try_dbfield
 from django_filters.filterset import BaseFilterSet
 
 from .filters import StringFilterES
 
+# Basic conversion from ES fields to FilterES fields
+FILTER_FOR_ESFIELD_DEFAULTS = {
+    StringField: {'filter_class': StringFilterES},
+    TextField: {'filter_class': StringFilterES},
+}
+
 
 class FilterSetESOptions(object):
     """Basic FilterSetES options to Metadata"""
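FILTER_FOR_ESFIELD_DEFAULTS is consulted through django_filters' try_dbfield (see get_filter_exp below), which calls the lookup for the field's class and then for its base classes, so subclasses of the registered field types resolve to the same entry. A rough sketch of that lookup, assuming django_elasticsearch_dsl and django_filters are installed (the string value stands in for the real StringFilterES class so the sketch stays self-contained):

from django_elasticsearch_dsl import TextField
from django_filters.utils import try_dbfield

# Stand-in for the mapping added above; the string replaces StringFilterES.
FILTER_FOR_ESFIELD_DEFAULTS = {
    TextField: {'filter_class': 'StringFilterES'},
}

# try_dbfield walks the class and its bases until the callable returns a value.
entry = try_dbfield(FILTER_FOR_ESFIELD_DEFAULTS.get, TextField) or {}
print(entry.get('filter_class'))  # 'StringFilterES'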
@@ -14,8 +22,68 @@ class FilterSetESOptions(object):
         """
         The field option is combined with the index to automatically generate
         filters.
 
+        The includes option accepts two kinds of syntax:
+            - a list of field names
+            - a dictionary of field names mapped to a list of expressions
+
+        Example:
+            class UserFilter(FilterSetES):
+                class Meta:
+                    index = UserIndex
+                    includes = ['username', 'last_login']
+
+        or
+
+            class UserFilter(FilterSetES):
+                class Meta:
+                    index = UserIndex
+                    includes = {
+                        'username': ['term'],
+                        'last_login': ['lte', 'gte'],
+                    }
+
+        The list syntax will create a filter with its default behavior for
+        each field included in includes. The dictionary syntax will create a
+        filter for each expression declared for its corresponding field.
+
+        Note that the generated filters will not overwrite filters
+        declared on the FilterSet.
+
+        Example:
+            class UserFilter(FilterSetES):
+                username = StringFieldES('username', core_type='text', expr=['partial'])
+
+                class Meta:
+                    index = UserIndex
+                    includes = {
+                        'username': ['term', 'word'],
+                    }
+
+        A query with username as a parameter will match those words with the
+        username value as a substring.
+
+        The excludes option accepts a list of field names.
+
+        Example:
+            class UserFilter(FilterSetES):
+                class Meta:
+                    index = UserIndex
+                    excludes = ['username', 'last_login']
+
+        It is necessary to provide includes or excludes. An empty excludes
+        can be used to generate filters for every field of the index.
         """
         self.index = getattr(options, 'index', None)
+        self.includes = getattr(options, 'includes', None)
+        self.excludes = getattr(options, 'excludes', None)
+
+        if self.index is None:
+            raise ValueError('You need provide a Index in Meta.')
+        if self.excludes is None and self.includes is None:
+            raise ValueError('You need provide includes or excludes field in Meta.')
+
         self.model = self.index._doc_type.model if self.index else None
@@ -31,12 +99,15 @@ class FilterSetESMetaclass(type):
         new_class = super(FilterSetESMetaclass, mcs).__new__(mcs, name, bases, attrs)
 
         if issubclass(new_class, BaseFilterSet):
+            new_class._meta = FilterSetESOptions(getattr(new_class, 'Meta', None))
+
             base_filters = OrderedDict()
             for name, filter_field in six.iteritems(declared_filters):
                 base_filters.update(filter_field.fields)
 
+            meta_filters = mcs.get_meta_filters(new_class._meta)
+            base_filters.update(OrderedDict(meta_filters))
             new_class.base_filters = base_filters
 
-            new_class._meta = FilterSetESOptions(getattr(new_class, 'Meta', None))
-
         return new_class
 
     @classmethod
@@ -63,6 +134,95 @@ class FilterSetESMetaclass(type):
 
         return OrderedDict(filters)
 
+    @classmethod
+    def get_meta_filters(mcs, meta):
+        """
+        Get filters from Meta configuration
+        :return: Fields extracted from the index and from the FilterSetES.
+        """
+        index_fields = mcs.get_index_fields(meta)
+
+        meta_filters = []
+        for name, index_field, data in index_fields:
+
+            if isinstance(index_field, ObjectField):
+                meta_filters.extend((name, mcs.get_filter_object(name, index_field, data)))
+            else:
+                meta_filters.append((name, mcs.get_filter_exp(name, index_field, data)))
+
+        return meta_filters
+
+    @classmethod
+    def get_index_fields(mcs, meta):
+        """
+        Get the fields from the index that appear in the Meta configuration of the filter_set
+        :return: Tuples of (name, field, lookup_expr) with the field name, the ES field class and its lookup_expr
+        """
+        index_fields = meta.index._doc_type._fields()
+        meta_includes = meta.includes
+        meta_excludes = meta.excludes
+
+        if isinstance(meta_includes, dict):
+            # The lookup_expr are defined in Meta
+            filter_fields = [(name, index_fields[name], data) for name, data in meta_includes.items()]
+        elif meta_includes is not None:
+            # The lookup_expr are not defined
+            filter_fields = [(name, index_fields[name], None) for name in meta_includes]
+        else:
+            # No `includes` are declared in Meta, so every index field not in `excludes` is converted to a filter
+            filter_fields = [(name, field, None) for name, field in index_fields.items() if name not in meta_excludes]
+        return filter_fields
+
+    @classmethod
+    def get_filter_object(mcs, name, field, data):
+        """Get filters from ObjectField"""
+        index_fields = []
+
+        properties = field._doc_class._doc_type.mapping.properties._params.get('properties', {})
+
+        for inner_name, inner_field in properties.items():
+
+            if data and inner_name not in data:
+                # This inner field is not filterable
+                continue
+            inner_data = data[inner_name] if data else None
+
+            index_fields.append(mcs.get_filter_exp(inner_name, inner_field, inner_data, root=name))
+
+        return index_fields
+
+    @classmethod
+    def get_filter_exp(mcs, name, field, data=None, root=None):
+        """Initialize filter"""
+        field_data = try_dbfield(FILTER_FOR_ESFIELD_DEFAULTS.get, field.__class__) or {}
+        filter_class = field_data.get('filter_class')
+
+        extra = field_data.get('extra', {})
+        kwargs = copy.deepcopy(extra)
+
+        # Get lookup_expr from configuration
+        if data and 'lookup_exprs' in data:
+            if 'lookup_exprs' in kwargs:
+                kwargs['lookup_exprs'] = set(kwargs['lookup_exprs']).intersection(set(data['lookup_exprs']))
+            else:
+                kwargs['lookup_exprs'] = set(data['lookup_exprs'])
+        elif 'lookup_exprs' in kwargs:
+            kwargs['lookup_exprs'] = set(kwargs['lookup_exprs'])
+
+        kwargs['name'], kwargs['attr'] = mcs.get_name(name, root, data)
+        return filter_class(**kwargs)
+
+    @staticmethod
+    def get_name(name, root, data):
+        """Get names of the field and the path to resolve it"""
+        field_name = data.get('name', None) if data else None
+        attr = data.get('attr', None) if data else None
+        if not field_name:
+            field_name = '{root}_{name}'.format(root=root, name=name) if root else name
+        if not attr:
+            attr = '{root}.{name}'.format(root=root, name=name) if root else name
+        return field_name, attr
+
 
 class FilterSetES(six.with_metaclass(FilterSetESMetaclass, object)):
     """FilterSet specific for ElasticSearch."""
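get_name is what turns a nested property of an ObjectField into a flat filter: the exported name joins the root with an underscore, while attr keeps the dotted path used against the index. A standalone restatement for illustration only (the reporter/first_name names are assumptions, not fields taken from this diff):

# Standalone restatement of FilterSetESMetaclass.get_name, for illustration.
def resolve_name(name, root=None, data=None):
    field_name = data.get('name') if data else None
    attr = data.get('attr') if data else None
    if not field_name:
        field_name = '{root}_{name}'.format(root=root, name=name) if root else name
    if not attr:
        attr = '{root}.{name}'.format(root=root, name=name) if root else name
    return field_name, attr


print(resolve_name('first_name', root='reporter'))       # ('reporter_first_name', 'reporter.first_name')
print(resolve_name('headline'))                          # ('headline', 'headline')
print(resolve_name('headline', data={'name': 'title'}))  # ('title', 'headline')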
@@ -91,7 +251,7 @@ class FilterSetES(six.with_metaclass(FilterSetESMetaclass, object)):
         :return: Generates a super query with bool as root, and combines all sub-queries from each argument.
         """
         query_base = Q("bool")
-        for name, filter_es in six.iteritems(self.declared_filters):
+        for name, filter_es in six.iteritems(self.base_filters):
             query_filter = filter_es.get_q(self.data) if len(self.data) else None
             if query_filter is not None:
                 query_base += query_filter
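generate_es_query leans on elasticsearch_dsl's query arithmetic: adding a query to a bool query folds it into the must clause, which is what query_base += query_filter does for every filter that produced something. A small sketch, assuming elasticsearch_dsl is installed:

from elasticsearch_dsl import Q

query_base = Q("bool")
# Same combination generate_es_query performs for each non-empty filter.
query_base += Q("term", headline="a1")

print(query_base.to_dict())
# {'bool': {'must': [{'term': {'headline': 'a1'}}]}}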
@@ -16,19 +16,49 @@ class ArticleDocument(DocType):
     class Meta(object):
         """Metaclass config"""
         model = Article
+        fields = [
+            'headline',
+        ]
 
 
-class ArticleFilterES(FilterSetES):
+class ArticleFilterESAsField(FilterSetES):
     """Article Filter for ES"""
     class Meta(object):
         """Metaclass data"""
         index = ArticleDocument
+        includes = []
 
     headline = filters.StringFilterES(attr='headline')
 
 
+class ArticleFilterESInMeta(FilterSetES):
+    """Article Filter for ES"""
+    class Meta(object):
+        """Metaclass data"""
+        index = ArticleDocument
+        includes = ['headline']
+
+
+class ArticleFilterESInMetaDict(FilterSetES):
+    """Article Filter for ES"""
+    class Meta(object):
+        """Metaclass data"""
+        index = ArticleDocument
+        includes = {
+            'headline': {
+                'lookup_expressions': ['term', 'contains']
+            }
+        }
+
+
 class ESFilterQuery(ObjectType):
     """A query for ES fields"""
-    articles = DjangoESFilterConnectionField(
-        ArticleNode, filterset_class=ArticleFilterES
+    articles_as_field = DjangoESFilterConnectionField(
+        ArticleNode, filterset_class=ArticleFilterESAsField
+    )
+    articles_in_meta = DjangoESFilterConnectionField(
+        ArticleNode, filterset_class=ArticleFilterESInMeta
+    )
+    articles_in_meta_dict = DjangoESFilterConnectionField(
+        ArticleNode, filterset_class=ArticleFilterESInMetaDict
     )
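The three connection fields above are what the tests below exercise; graphene exposes the snake_case names as articlesAsField, articlesInMeta and articlesInMetaDict. A hedged sketch of executing one of them directly, assuming the Django test settings and the Elasticsearch mocks used in the tests are in place:

from graphene import Schema
from graphene_django.elasticsearch.tests.filters import ESFilterQuery

schema = Schema(query=ESFilterQuery)
# `articles_in_meta` is exposed as `articlesInMeta`; `headline` is the
# argument generated from the Meta includes list.
result = schema.execute('''
    query {
        articlesInMeta(headline: "a1") {
            edges { node { headline } }
        }
    }
''')
print(result.errors)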
@@ -4,19 +4,15 @@ import pytest
 from mock import mock
 
 from graphene import Schema
-from graphene_django.tests.models import Article, Reporter
-from graphene_django.filter.tests.test_fields import assert_arguments, ArticleNode
-from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED
-from graphene_django.elasticsearch.tests.filters import ArticleFilterES, ESFilterQuery
+from graphene_django.elasticsearch.filter import filters
+from graphene_django.tests.models import Article, Reporter
+from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED
+from graphene_django.elasticsearch.tests.filters import ESFilterQuery, ArticleDocument
 
 pytestmark = []
 
-if DJANGO_FILTER_INSTALLED and DJANGO_ELASTICSEARCH_DSL_INSTALLED:
-    from graphene_django.filter import (
-        DjangoFilterConnectionField,
-    )
-else:
+if not DJANGO_FILTER_INSTALLED or not DJANGO_ELASTICSEARCH_DSL_INSTALLED:
     pytestmark.append(
         pytest.mark.skipif(
             True, reason="django_filters not installed or not compatible"
@@ -26,14 +22,8 @@ else:
 pytestmark.append(pytest.mark.django_db)
 
 
-def test_filter_string_fields():
-    field = DjangoFilterConnectionField(ArticleNode, filterset_class=ArticleFilterES)
-    assert_arguments(field, "headline", "headline_term")
-
-
-def test_filter_query():
+def fake_data():
     r1 = Reporter.objects.create(first_name="r1", last_name="r1", email="r1@test.com")
 
     a1 = Article.objects.create(
         headline="a1",
         pub_date=datetime.now(),
@@ -48,10 +38,15 @@ def test_filter_query():
         reporter=r1,
         editor=r1,
     )
+    return a1, a2
+
+
+def filter_generation(field, query_str, spected_arguments):
+    a1, a2 = fake_data()
 
     query = """
     query {
-        articles {
+        %s(%s) {
             edges {
                 node {
                     headline
@@ -59,21 +54,37 @@ def test_filter_query():
                 }
             }
         }
-    """
+    """ % (field, query_str)
 
     mock_count = mock.Mock(return_value=3)
     mock_slice = mock.Mock(return_value=mock.Mock(to_queryset=mock.Mock(
         return_value=Article.objects.filter(pk__in=[a1.id, a2.id])
    )))
+    mock_query = mock.Mock(return_value=ArticleDocument.search())
 
     with mock.patch('django_elasticsearch_dsl.search.Search.count', mock_count),\
-            mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice):
+            mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice),\
+            mock.patch('elasticsearch_dsl.Search.query', mock_query):
 
         schema = Schema(query=ESFilterQuery)
         result = schema.execute(query)
 
         assert not result.errors
 
-        assert len(result.data["articles"]["edges"]) == 2
-        assert result.data["articles"]["edges"][0]["node"]["headline"] == "a1"
-        assert result.data["articles"]["edges"][1]["node"]["headline"] == "a2"
+        mock_query.assert_called_with(filters.StringFilterES(attr='headline').get_q(spected_arguments))
+
+        assert len(result.data[field]["edges"]) == 2
+        assert result.data[field]["edges"][0]["node"]["headline"] == "a1"
+        assert result.data[field]["edges"][1]["node"]["headline"] == "a2"
+
+
+def test_filter_as_field():
+    filter_generation("articlesAsField", "headline: \"A text\"", {"headline": "A text"})
+
+
+def test_filter_in_meta():
+    filter_generation("articlesInMeta", "headline: \"A text\"", {"headline": "A text"})
+
+
+def test_filter_in_meta_dict():
+    filter_generation("articlesInMetaDict", "headline: \"A text\"", {"headline": "A text"})