Mirror of https://github.com/graphql-python/graphene-django.git
Synced 2025-07-13 17:52:19 +03:00

Commit 68e940c15d (parent 3778806bae)

    run make format
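The changes below are consistent with running the Black formatter over the package: single-quoted strings become double-quoted, long calls are split one argument per line with trailing commas, and a blank line is inserted after class docstrings. The Makefile target itself is not shown on this page, so treating "make format" as a Black invocation is an assumption; a minimal sketch for checking that convention against any of the rewritten lines:

    # Minimal sketch, assuming `make format` runs Black (the Makefile is not part of this page).
    # black.format_str() formats a source string in memory using Black's own rules.
    import black

    before = "fields = kwargs.get('fields', None)\n"
    after = black.format_str(before, mode=black.FileMode())
    print(after)  # fields = kwargs.get("fields", None)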
@@ -4,25 +4,36 @@ from graphene_django.filter import DjangoFilterConnectionField
 class DjangoESFilterConnectionField(DjangoFilterConnectionField):
     """A Field to replace DjangoFilterConnectionField manager by QuerysetBridge"""
 
     def __init__(self, object_type, *args, **kwargs):
         """Validating field allowed for this connection
         :param object_type: DjangoObjectType
         """
-        fields = kwargs.get('fields', None)
+        fields = kwargs.get("fields", None)
         if fields is not None:
-            raise ValueError('DjangoESFilterConnectionField do not permit argument fields yet.')
+            raise ValueError(
+                "DjangoESFilterConnectionField do not permit argument fields yet."
+            )
 
-        order_by = kwargs.get('order_by', None)
+        order_by = kwargs.get("order_by", None)
         if order_by is not None:
-            raise ValueError('DjangoESFilterConnectionField do not permit argument order_by yet.')
+            raise ValueError(
+                "DjangoESFilterConnectionField do not permit argument order_by yet."
+            )
 
-        filterset_class = kwargs.get('filterset_class', None)
+        filterset_class = kwargs.get("filterset_class", None)
         if filterset_class is None:
-            raise ValueError('You should provide a FilterSetES as filterset_class argument.')
+            raise ValueError(
+                "You should provide a FilterSetES as filterset_class argument."
+            )
 
-        super(DjangoESFilterConnectionField, self).__init__(object_type, *args, **kwargs)
+        super(DjangoESFilterConnectionField, self).__init__(
+            object_type, *args, **kwargs
+        )
 
-        self.manager = ManagerProxy(search_manager=self.filterset_class._meta.index.search)
+        self.manager = ManagerProxy(
+            search_manager=self.filterset_class._meta.index.search
+        )
 
     def get_manager(self):
         """Returning a ManagerBridge to replace the direct use over the Model manager"""
@@ -5,11 +5,18 @@ from graphene_django.elasticsearch.filter.processors import ProcessorFactory
 
 class FilterES(object):
     """Fields specific to ElasticSearch."""
-    default_processor = 'term'
+
+    default_processor = "term"
     default_argument = String()
 
-    def __init__(self, field_name, field_name_es=None, lookup_expressions=None,
-                 default_processor=None, argument=None):
+    def __init__(
+        self,
+        field_name,
+        field_name_es=None,
+        lookup_expressions=None,
+        default_processor=None,
+        argument=None,
+    ):
         """
         :param field_name: Name of the field. This is the name that will be exported.
         :param field_name_es: Path to the index attr that will be used as filter.
@@ -31,10 +38,14 @@ class FilterES(object):
         self.processor = None
         if self.lookup_expressions:
             for variant in self.lookup_expressions:
-                self.processor = ProcessorFactory.make_processor(variant, self, self.processor)
+                self.processor = ProcessorFactory.make_processor(
+                    variant, self, self.processor
+                )
 
         else:
-            self.processor = ProcessorFactory.make_processor(self.default_processor, self, self.processor)
+            self.processor = ProcessorFactory.make_processor(
+                self.default_processor, self, self.processor
+            )
 
         self.argument = argument or self.default_argument
         self.fields = self.processor.generate_field()
@@ -49,14 +60,17 @@ class FilterES(object):
 
 class StringFilterES(FilterES):
     """String Fields specific to ElasticSearch."""
-    default_processor = 'contains'
+
+    default_processor = "contains"
 
 
 class BoolFilterES(FilterES):
     """Boolean filter to ES"""
+
     default_argument = Boolean()
 
 
 class NumberFilterES(FilterES):
     """Filter to an numeric value to ES"""
+
     default_argument = Int()
@@ -3,8 +3,19 @@ import copy
 from collections import OrderedDict
 from elasticsearch_dsl import Q
 from graphene import Enum, InputObjectType, Field, Int, Float
-from django_elasticsearch_dsl import StringField, TextField, BooleanField, IntegerField, FloatField, LongField, \
-    ShortField, DoubleField, DateField, KeywordField, ObjectField
+from django_elasticsearch_dsl import (
+    StringField,
+    TextField,
+    BooleanField,
+    IntegerField,
+    FloatField,
+    LongField,
+    ShortField,
+    DoubleField,
+    DateField,
+    KeywordField,
+    ObjectField,
+)
 from django.utils import six
 
 from django_filters.utils import try_dbfield
@@ -15,34 +26,36 @@ from .filters import StringFilterES, FilterES, BoolFilterES, NumberFilterES
 
 # Basic conversion from ES fields to FilterES fields
 FILTER_FOR_ESFIELD_DEFAULTS = {
-    StringField: {'filter_class': StringFilterES},
-    TextField: {'filter_class': StringFilterES},
-    BooleanField: {'filter_class': BoolFilterES},
-    IntegerField: {'filter_class': NumberFilterES},
-    FloatField: {'filter_class': NumberFilterES, 'argument': Float()},
-    LongField: {'filter_class': NumberFilterES, 'argument': Int()},
-    ShortField: {'filter_class': NumberFilterES, 'argument': Int()},
-    DoubleField: {'filter_class': NumberFilterES, 'argument': Int()},
-    DateField: {'filter_class': StringFilterES},
-    KeywordField: {'filter_class': StringFilterES},
+    StringField: {"filter_class": StringFilterES},
+    TextField: {"filter_class": StringFilterES},
+    BooleanField: {"filter_class": BoolFilterES},
+    IntegerField: {"filter_class": NumberFilterES},
+    FloatField: {"filter_class": NumberFilterES, "argument": Float()},
+    LongField: {"filter_class": NumberFilterES, "argument": Int()},
+    ShortField: {"filter_class": NumberFilterES, "argument": Int()},
+    DoubleField: {"filter_class": NumberFilterES, "argument": Int()},
+    DateField: {"filter_class": StringFilterES},
+    KeywordField: {"filter_class": StringFilterES},
 }
 
 
 class OrderEnum(Enum):
     """Order enum to desc-asc"""
-    asc = 'asc'
-    desc = 'desc'
+
+    asc = "asc"
+    desc = "desc"
 
     @property
     def description(self):
         """Description to order enum"""
         if self == OrderEnum.asc:
-            return 'Ascendant order'
-        return 'Descendant order'
+            return "Ascendant order"
+        return "Descendant order"
 
 
 class FilterSetESOptions(object):
     """Basic FilterSetES options to Metadata"""
+
     def __init__(self, options=None):
         """
         The field option is combined with the index to automatically generate
@@ -123,15 +136,15 @@ class FilterSetESOptions(object):
             }
 
         """
-        self.index = getattr(options, 'index', None)
-        self.includes = getattr(options, 'includes', None)
-        self.excludes = getattr(options, 'excludes', None)
-        self.order_by = getattr(options, 'order_by', None)
+        self.index = getattr(options, "index", None)
+        self.includes = getattr(options, "includes", None)
+        self.excludes = getattr(options, "excludes", None)
+        self.order_by = getattr(options, "order_by", None)
 
         if self.index is None:
-            raise ValueError('You need provide a Index in Meta.')
+            raise ValueError("You need provide a Index in Meta.")
         if self.excludes is None and self.includes is None:
-            raise ValueError('You need provide includes or excludes field in Meta.')
+            raise ValueError("You need provide includes or excludes field in Meta.")
 
         self.model = self.index._doc_type.model if self.index else None
 
@@ -143,12 +156,12 @@ class FilterSetESMetaclass(type):
         """Get filters declared explicitly in the class"""
         # get declared as field
         declared_filters = mcs.get_declared_filters(bases, attrs)
-        attrs['declared_filters'] = declared_filters
+        attrs["declared_filters"] = declared_filters
 
         new_class = super(FilterSetESMetaclass, mcs).__new__(mcs, name, bases, attrs)
 
         if issubclass(new_class, BaseFilterSet):
-            new_class._meta = FilterSetESOptions(getattr(new_class, 'Meta', None))
+            new_class._meta = FilterSetESOptions(getattr(new_class, "Meta", None))
 
             # get declared as meta
             meta_filters = mcs.get_meta_filters(new_class._meta)
@@ -167,7 +180,7 @@ class FilterSetESMetaclass(type):
             if new_class._meta.order_by is not None:
                 sort_fields = mcs.generate_sort_field(new_class._meta.order_by)
                 sort_type = mcs.create_sort_enum(name, sort_fields)
-                base_filters['sort'] = sort_type()
+                base_filters["sort"] = sort_type()
 
             new_class.sort_fields = sort_fields
             new_class.base_filters = base_filters
@@ -193,9 +206,12 @@ class FilterSetESMetaclass(type):
 
         # Merge declared filters from base classes
         for base in reversed(bases):
-            if hasattr(base, 'declared_filters'):
-                filters = [(name, field) for name, field in base.declared_filters.items() if name not in attrs] \
-                    + filters
+            if hasattr(base, "declared_filters"):
+                filters = [
+                    (name, field)
+                    for name, field in base.declared_filters.items()
+                    if name not in attrs
+                ] + filters
 
         return OrderedDict(filters)
 
@@ -232,13 +248,19 @@ class FilterSetESMetaclass(type):
 
         if isinstance(meta_includes, dict):
             # The lookup_expr are defined in Meta
-            filter_fields = [(name, index_fields[name], data) for name, data in meta_includes.items()]
+            filter_fields = [
+                (name, index_fields[name], data) for name, data in meta_includes.items()
+            ]
         elif meta_includes is not None:
             # The lookup_expr are not defined
             filter_fields = [(name, index_fields[name], None) for name in meta_includes]
         else:
             # No `includes` are declared in meta, so all not `excludes` fields from index will be converted to filters
-            filter_fields = [(name, field, None) for name, field in index_fields.items() if name not in meta_excludes]
+            filter_fields = [
+                (name, field, None)
+                for name, field in index_fields.items()
+                if name not in meta_excludes
+            ]
         return filter_fields
 
     @classmethod
@@ -251,7 +273,9 @@ class FilterSetESMetaclass(type):
         """
         index_fields = OrderedDict()
 
-        properties = field._doc_class._doc_type.mapping.properties._params.get('properties', {})
+        properties = field._doc_class._doc_type.mapping.properties._params.get(
+            "properties", {}
+        )
 
         for inner_name, inner_field in properties.items():
 
@@ -261,7 +285,9 @@ class FilterSetESMetaclass(type):
 
             inner_data = data[inner_name] if data else None
 
-            filter_exp = mcs.get_filter_exp(inner_name, inner_field, inner_data, root=name)
+            filter_exp = mcs.get_filter_exp(
+                inner_name, inner_field, inner_data, root=name
+            )
             index_fields.update({inner_name: filter_exp})
 
         return index_fields
@@ -276,11 +302,11 @@ class FilterSetESMetaclass(type):
         :param root: root name
         """
         field_data = try_dbfield(FILTER_FOR_ESFIELD_DEFAULTS.get, field.__class__) or {}
-        filter_class = field_data.get('filter_class')
+        filter_class = field_data.get("filter_class")
 
         kwargs = copy.deepcopy(data) if data is not None else {}
 
-        kwargs['field_name'], kwargs['field_name_es'] = mcs.get_name(name, root, data)
+        kwargs["field_name"], kwargs["field_name_es"] = mcs.get_name(name, root, data)
 
         return filter_class(**kwargs)
 
@@ -292,12 +318,14 @@ class FilterSetESMetaclass(type):
         :param data: lookup_expr
         :param root: root name
         """
-        field_name = data.get('field_name', None) if data else None
-        field_name_es = data.get('field_name_es', None) if data else None
+        field_name = data.get("field_name", None) if data else None
+        field_name_es = data.get("field_name_es", None) if data else None
         if not field_name:
-            field_name = '{root}_{name}'.format(root=root, name=name) if root else name
+            field_name = "{root}_{name}".format(root=root, name=name) if root else name
         if not field_name_es:
-            field_name_es = '{root}.{name}'.format(root=root, name=name) if root else name
+            field_name_es = (
+                "{root}.{name}".format(root=root, name=name) if root else name
+            )
         return field_name, field_name_es
 
     @staticmethod
@@ -311,8 +339,9 @@ class FilterSetESMetaclass(type):
         """
 
         sort_enum_name = "{}SortFields".format(name)
-        sort_descriptions = {field: "Sort by {field}".format(field=field) for field in
-                             sort_fields.keys()}
+        sort_descriptions = {
+            field: "Sort by {field}".format(field=field) for field in sort_fields.keys()
+        }
         sort_fields = [(field, field) for field in sort_fields.keys()]
 
         class EnumWithDescriptionsType(object):
@@ -327,6 +356,7 @@ class FilterSetESMetaclass(type):
 
         class SortType(InputObjectType):
             """Sort Type"""
+
             order = Field(OrderEnum)
             field = Field(enum, required=True)
 
@@ -349,6 +379,7 @@ class FilterSetESMetaclass(type):
 
 class FilterSetES(six.with_metaclass(FilterSetESMetaclass, object)):
     """FilterSet specific for ElasticSearch."""
+
     def __init__(self, data, queryset, request):
         """
         Receiving params necessaries to resolved the data
@@ -367,9 +398,9 @@ class FilterSetES(six.with_metaclass(FilterSetESMetaclass, object)):
         self.es_query.apply_query("query", query_base)
         self.es_query.apply_query("source", ["id"])
 
-        if 'sort' in self.data:
-            sort_data = self.data['sort'].copy()
-            field_name = self.sort_fields[sort_data.pop('field')]
+        if "sort" in self.data:
+            sort_data = self.data["sort"].copy()
+            field_name = self.sort_fields[sort_data.pop("field")]
             self.es_query.apply_query("sort", {field_name: sort_data})
 
         return self.es_query
@@ -1,4 +1,3 @@
-
 class FieldResolverObservable(object):
     """Observable to attach processor by field and resolve it with the field value"""
 
@@ -5,7 +5,7 @@ from graphene import List, Boolean
 
 
 class Processor(object):
-    suffix_expr = 'term'
+    suffix_expr = "term"
 
     def __init__(self, filter_es, parent_processor=None):
         """
@@ -74,7 +74,9 @@ class Processor(object):
         result = len(self.filter_es.field_name_es)
 
         if result > 1:
-            queries = [self._get_query(name, value) for name in self.filter_es.field_name_es]
+            queries = [
+                self._get_query(name, value) for name in self.filter_es.field_name_es
+            ]
             return Q("bool", must={"bool": {"should": queries}})
 
         return Q("bool", must=self._get_query(self.filter_es.field_name_es[0], value))
@@ -87,17 +89,19 @@ class Processor(object):
         :param value: Value passed to this processor
         :return: A elasticsearch Query
         """
-        return Q('term', **{name: value})
+        return Q("term", **{name: value})
 
 
 class TermProcessor(Processor):
     """Have a same behavior of parent this is only with semantic proposal"""
+
     pass
 
 
 class ContainsProcessor(Processor):
     """fuzzy search"""
-    suffix_expr = 'contains'
+
+    suffix_expr = "contains"
 
     @staticmethod
     def _get_query(name, value):
@@ -107,16 +111,13 @@ class ContainsProcessor(Processor):
         :param value: Value passed to this processor
         :return: A elasticsearch Query
         """
-        return Q('match',
-                 **{name: {
-                     "query": value,
-                     "fuzziness": "auto"
-                 }})
+        return Q("match", **{name: {"query": value, "fuzziness": "auto"}})
 
 
 class RegexProcessor(Processor):
     """Search based on regular expressions"""
-    suffix_expr = 'regex'
+
+    suffix_expr = "regex"
 
     @staticmethod
     def _get_query(name, value):
@@ -126,12 +127,13 @@ class RegexProcessor(Processor):
         :param value: Value passed to this processor
         :return: A elasticsearch Query
         """
-        return Q('wildcard', **{name: value})
+        return Q("wildcard", **{name: value})
 
 
 class PhraseProcessor(Processor):
     """Search by the union of many terms"""
-    suffix_expr = 'phrase'
+
+    suffix_expr = "phrase"
 
     @staticmethod
     def _get_query(name, value):
@@ -141,15 +143,13 @@ class PhraseProcessor(Processor):
         :param value: Value passed to this processor
         :return: A elasticsearch Query
         """
-        return Q('match_phrase',
-                 **{name: {
-                     "query": value
-                 }})
+        return Q("match_phrase", **{name: {"query": value}})
 
 
 class PrefixProcessor(Processor):
     """Search by the prefix of the terms"""
-    suffix_expr = 'prefix'
+
+    suffix_expr = "prefix"
 
     @staticmethod
     def _get_query(name, value):
@@ -159,15 +159,13 @@ class PrefixProcessor(Processor):
         :param value: Value passed to this processor
         :return: A elasticsearch Query
         """
-        return Q('match_phrase_prefix',
-                 **{name: {
-                     "query": value
-                 }})
+        return Q("match_phrase_prefix", **{name: {"query": value}})
 
 
 class InProcessor(Processor):
     """Search by many value for a field"""
-    suffix_expr = 'in'
+
+    suffix_expr = "in"
 
     @staticmethod
     def _get_query(name, value):
@@ -177,7 +175,7 @@ class InProcessor(Processor):
         :param value: Value passed to this processor
         :return: A elasticsearch Query
         """
-        return Q('terms', **{name: value})
+        return Q("terms", **{name: value})
 
     def get_type(self):
         """Change base argument by a list of base argument"""
@@ -186,7 +184,8 @@ class InProcessor(Processor):
 
 class ExitsProcessor(Processor):
     """Search by if the field is in the document"""
-    suffix_expr = 'exits'
+
+    suffix_expr = "exits"
 
     @staticmethod
     def _get_query(name, value):
@@ -196,9 +195,9 @@ class ExitsProcessor(Processor):
         :param value: Value passed to this processor
         :return: A elasticsearch Query
         """
-        return Q('bool', **{
-            'must' if value else 'must_not': {'exists': {'field': name}}
-        })
+        return Q(
+            "bool", **{"must" if value else "must_not": {"exists": {"field": name}}}
+        )
 
     def get_type(self):
         return Boolean()
@@ -206,7 +205,8 @@ class ExitsProcessor(Processor):
 
 class LteProcessor(Processor):
     """Search by range less than"""
-    suffix_expr = 'lte'
+
+    suffix_expr = "lte"
 
     @staticmethod
     def _get_query(name, value):
@@ -216,12 +216,13 @@ class LteProcessor(Processor):
         :param value: Value passed to this processor
         :return: A elasticsearch Query
         """
-        return Q('range', **{name: {'lte': value}})
+        return Q("range", **{name: {"lte": value}})
 
 
 class GteProcessor(Processor):
     """Search by range greater than"""
-    suffix_expr = 'gte'
+
+    suffix_expr = "gte"
 
     @staticmethod
     def _get_query(name, value):
@@ -231,7 +232,7 @@ class GteProcessor(Processor):
         :param value: Value passed to this processor
         :return: A elasticsearch Query
         """
-        return Q("range", **{name: {'gte': value}})
+        return Q("range", **{name: {"gte": value}})
 
 
 class ProcessorFactory(object):
@@ -261,4 +262,4 @@ class ProcessorFactory(object):
             return processor_class(filter_es, parent_processor)
 
         else:
-            raise ValueError('We do not have processor: %s.' % variant)
+            raise ValueError("We do not have processor: %s." % variant)
@@ -1,4 +1,3 @@
-
 class QuerysetProxy(object):
     """Bridge to Queryset through ES query"""
 
@@ -38,7 +38,10 @@ def generate_query(field, query_str):
             }
         }
     }
-    """ % (field, query_str)
+    """ % (
+        field,
+        query_str,
+    )
     return query
 
 
@@ -48,14 +51,22 @@ def filter_generation(field, query_str, expected_arguments, method_to_mock="quer
     query = generate_query(field, query_str)
 
     mock_count = mock.Mock(return_value=3)
-    mock_slice = mock.Mock(return_value=mock.Mock(to_queryset=mock.Mock(
-        return_value=Article.objects.filter(pk__in=[a1.id, a2.id])
-    )))
+    mock_slice = mock.Mock(
+        return_value=mock.Mock(
+            to_queryset=mock.Mock(
+                return_value=Article.objects.filter(pk__in=[a1.id, a2.id])
+            )
+        )
+    )
     mock_query = mock.Mock(return_value=ArticleDocument.search())
 
-    with mock.patch('django_elasticsearch_dsl.search.Search.count', mock_count), \
-            mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice), \
-            mock.patch("elasticsearch_dsl.Search.%s" % method_to_mock, mock_query):
+    with mock.patch(
+        "django_elasticsearch_dsl.search.Search.count", mock_count
+    ), mock.patch(
+        "django_elasticsearch_dsl.search.Search.__getitem__", mock_slice
+    ), mock.patch(
+        "elasticsearch_dsl.Search.%s" % method_to_mock, mock_query
+    ):
         schema = Schema(query=ESFilterQuery)
         result = schema.execute(query)
 
@@ -7,107 +7,119 @@ from graphene_django.elasticsearch.filter import filters
 from graphene_django.elasticsearch.filter.fields import DjangoESFilterConnectionField
 from graphene_django.elasticsearch.filter.filterset import FilterSetES
 
-ads_index = Index('articles')
+ads_index = Index("articles")
 
 
 @ads_index.doc_type
 class ArticleDocument(DocType):
     """Article document describing Index"""
+
     class Meta(object):
         """Metaclass config"""
+
         model = Article
-        fields = [
-            'id',
-            'headline',
-            'pub_date',
-            'pub_date_time',
-            'lang',
-            'importance',
-        ]
+        fields = ["id", "headline", "pub_date", "pub_date_time", "lang", "importance"]
         related_models = (Reporter,)
 
-    reporter = fields.ObjectField(properties={
-        'id': fields.IntegerField(),
-        'first_name': fields.KeywordField(),
-        'email': fields.KeywordField(),
-    })
+    reporter = fields.ObjectField(
+        properties={
+            "id": fields.IntegerField(),
+            "first_name": fields.KeywordField(),
+            "email": fields.KeywordField(),
+        }
+    )
 
 
 class ArticleFilterESAsField(FilterSetES):
     """Article Filter for ES"""
+
     class Meta(object):
         """Metaclass data"""
+
         index = ArticleDocument
         includes = []
-        order_by = ['id']
+        order_by = ["id"]
 
-    headline = filters.StringFilterES(field_name='headline', lookup_expressions=['term', 'contains'])
+    headline = filters.StringFilterES(
+        field_name="headline", lookup_expressions=["term", "contains"]
+    )
 
 
 class ArticleFilterESInMeta(FilterSetES):
     """Article Filter for ES"""
+
     class Meta(object):
         """Metaclass data"""
+
         index = ArticleDocument
-        includes = ['id', 'headline']
-        order_by = {'id': 'es_id'}
+        includes = ["id", "headline"]
+        order_by = {"id": "es_id"}
 
 
 class ArticleFilterESInMetaDict(FilterSetES):
     """Article Filter for ES"""
+
     class Meta(object):
         """Metaclass data"""
+
         index = ArticleDocument
         includes = {
-            'headline': {
-                'lookup_expressions': [
-                    'term',
-                    'contains',
-                    'regex',
-                    'phrase',
-                    'prefix',
-                    'in',
-                    'exits',
-                    'lte',
-                    'gte',
+            "headline": {
+                "lookup_expressions": [
+                    "term",
+                    "contains",
+                    "regex",
+                    "phrase",
+                    "prefix",
+                    "in",
+                    "exits",
+                    "lte",
+                    "gte",
                 ]
             },
-            'reporter': {},
+            "reporter": {},
         }
 
 
 class ArticleFilterMultiField(FilterSetES):
     """Article Filter for ES"""
+
     class Meta(object):
         """Metaclass data"""
+
         index = ArticleDocument
         includes = []
 
     headline = filters.StringFilterES(
-        field_name='contain',
-        field_name_es=['headline', 'lang'],
-        lookup_expressions=['contains']
+        field_name="contain",
+        field_name_es=["headline", "lang"],
+        lookup_expressions=["contains"],
     )
 
 
 class ArticleFilterGenerateAll(FilterSetES):
     """Article Filter for ES"""
+
     class Meta(object):
         """Metaclass data"""
+
         index = ArticleDocument
         excludes = []
 
 
 class ArticleFilterExcludes(FilterSetES):
     """Article Filter for ES"""
+
     class Meta(object):
         """Metaclass data"""
+
         index = ArticleDocument
-        excludes = ['headline']
+        excludes = ["headline"]
 
 
 class ESFilterQuery(ObjectType):
     """A query for ES fields"""
+
     articles_as_field = DjangoESFilterConnectionField(
         ArticleNode, filterset_class=ArticleFilterESAsField
     )
@@ -5,7 +5,10 @@ from graphene_django.elasticsearch.filter.fields import DjangoESFilterConnection
 from graphene_django.elasticsearch.filter.filterset import FilterSetES
 from graphene_django.filter.tests.test_fields import ArticleNode
 from graphene_django.elasticsearch.tests.filters import ArticleDocument
-from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED
+from graphene_django.utils import (
+    DJANGO_FILTER_INSTALLED,
+    DJANGO_ELASTICSEARCH_DSL_INSTALLED,
+)
 
 pytestmark = []
 
@@ -25,12 +28,9 @@ def test_filter_bad_processor():
 
         class Meta(object):
             """Metaclass data"""
+
             index = ArticleDocument
-            includes = {
-                'headline': {
-                    'lookup_expressions': ['bad_processor']
-                }
-            }
+            includes = {"headline": {"lookup_expressions": ["bad_processor"]}}
 
     with raises(ValueError) as error_info:
         DjangoESFilterConnectionField(
@@ -42,33 +42,28 @@ def test_filter_bad_processor():
 
 def test_filter_field_without_filterset_class():
     with raises(ValueError) as error_info:
-        DjangoESFilterConnectionField(
-            ArticleNode
-        )
+        DjangoESFilterConnectionField(ArticleNode)
 
     assert "filterset_class" in str(error_info.value)
 
 
 def test_filter_field_with_fields():
     with raises(ValueError) as error_info:
-        DjangoESFilterConnectionField(
-            ArticleNode, fields=['headline']
-        )
+        DjangoESFilterConnectionField(ArticleNode, fields=["headline"])
 
     assert "fields" in str(error_info.value)
 
 
 def test_filter_field_with_order_by():
     with raises(ValueError) as error_info:
-        DjangoESFilterConnectionField(
-            ArticleNode, order_by=['headline']
-        )
+        DjangoESFilterConnectionField(ArticleNode, order_by=["headline"])
 
     assert "order_by" in str(error_info.value)
 
 
 def test_filter_filterset_without_index():
     with raises(ValueError) as error_info:
 
         class ArticleFilterBadProcessor(FilterSetES):
             """Article Filter for ES"""
+
@@ -84,11 +79,13 @@ def test_filter_filterset_without_index():
 
 def test_filter_filterset_without_xcludes():
     with raises(ValueError) as error_info:
 
         class ArticleFilterBadProcessor(FilterSetES):
             """Article Filter for ES"""
+
             class Meta(object):
                 """Metaclass data"""
+
                 index = ArticleDocument
 
         DjangoESFilterConnectionField(
@@ -2,9 +2,15 @@ import pytest
 from elasticsearch_dsl.query import Bool, Match, Term
 from graphene import Schema
 
-from graphene_django.elasticsearch.tests.commons import filter_generation, generate_query
+from graphene_django.elasticsearch.tests.commons import (
+    filter_generation,
+    generate_query,
+)
 from graphene_django.elasticsearch.tests.filters import ESFilterQuery
-from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED
+from graphene_django.utils import (
+    DJANGO_FILTER_INSTALLED,
+    DJANGO_ELASTICSEARCH_DSL_INSTALLED,
+)
 
 pytestmark = []
 
@@ -21,92 +27,100 @@ pytestmark.append(pytest.mark.django_db)
 def test_filter_string():
     filter_generation(
         "articlesAsField",
-        "headline: \"A text\"",
-        Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]),
+        'headline: "A text"',
+        Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]),
     )
 
 
 def test_filter_string_date():
     filter_generation(
         "articlesAsField",
-        "headline: \"A text\"",
-        Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]),
+        'headline: "A text"',
+        Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]),
     )
 
 
 def test_filter_as_field_order_by():
     filter_generation(
         "articlesAsField",
-        "headline: \"A text\", sort:{order:desc, field:id}",
-        {'id': {'order': 'desc'}},
-        "sort"
+        'headline: "A text", sort:{order:desc, field:id}',
+        {"id": {"order": "desc"}},
+        "sort",
     )
 
 
 def test_filter_as_field_order_by_dict():
     filter_generation(
         "articlesInMeta",
-        "headline: \"A text\", sort:{order:desc, field:id}",
-        {'es_id': {'order': 'desc'}},
-        "sort"
+        'headline: "A text", sort:{order:desc, field:id}',
+        {"es_id": {"order": "desc"}},
+        "sort",
    )
 
 
 def test_filter_in_meta():
     filter_generation(
         "articlesInMeta",
-        "headline: \"A text\"",
-        Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]),
+        'headline: "A text"',
+        Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]),
     )
 
 
 def test_filter_in_meta_dict():
     filter_generation(
         "articlesInMetaDict",
-        "headline: \"A text\"",
-        Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]),
+        'headline: "A text"',
+        Bool(must=[Match(headline={"query": "A text", "fuzziness": "auto"})]),
     )
 
 
 def test_filter_in_meta_dict_foreign():
     filter_generation(
         "articlesInMetaDict",
-        "reporterEmail: \"A mail\"",
-        Bool(must=[Match(reporter__email={'query': 'A mail', 'fuzziness': 'auto'})]),
+        'reporterEmail: "A mail"',
+        Bool(must=[Match(reporter__email={"query": "A mail", "fuzziness": "auto"})]),
     )
 
 
 def test_filter_in_multi_field():
     filter_generation(
         "articlesInMultiField",
-        "contain: \"A text\"",
-        Bool(must=[Bool(should=[
-            Match(headline={'query': 'A text', 'fuzziness': 'auto'}),
-            Match(lang={'query': 'A text', 'fuzziness': 'auto'})
-        ])]),
+        'contain: "A text"',
+        Bool(
+            must=[
+                Bool(
+                    should=[
+                        Match(headline={"query": "A text", "fuzziness": "auto"}),
+                        Match(lang={"query": "A text", "fuzziness": "auto"}),
+                    ]
+                )
+            ]
+        ),
     )
 
 
 def test_filter_generating_all():
     filter_generation(
         "articlesInGenerateAll",
-        "headline: \"A text\", "
-        "pubDate: \"0000-00-00\", "
-        "pubDateTime: \"00:00:00\", "
-        "lang: \"es\", "
+        'headline: "A text", '
+        'pubDate: "0000-00-00", '
+        'pubDateTime: "00:00:00", '
+        'lang: "es", '
         "importance: 1, ",
-        Bool(must=[
-            Match(headline={'query': 'A text', 'fuzziness': 'auto'}),
-            Match(pub_date={'query': '0000-00-00', 'fuzziness': 'auto'}),
-            Match(pub_date_time={'query': '00:00:00', 'fuzziness': 'auto'}),
-            Match(lang={'query': 'es', 'fuzziness': 'auto'}),
-            Term(importance=1)
-        ]),
+        Bool(
+            must=[
+                Match(headline={"query": "A text", "fuzziness": "auto"}),
+                Match(pub_date={"query": "0000-00-00", "fuzziness": "auto"}),
+                Match(pub_date_time={"query": "00:00:00", "fuzziness": "auto"}),
+                Match(lang={"query": "es", "fuzziness": "auto"}),
+                Term(importance=1),
+            ]
+        ),
     )
 
 
 def test_filter_generating_exclude():
-    query = generate_query("articlesInExcludes", "headline: \"A text\", ")
+    query = generate_query("articlesInExcludes", 'headline: "A text", ')
 
     schema = Schema(query=ESFilterQuery)
     result = schema.execute(query)
@@ -1,8 +1,20 @@
 import pytest
-from elasticsearch_dsl.query import Bool, Term, Wildcard, MatchPhrase, MatchPhrasePrefix, Range, Terms, Exists
+from elasticsearch_dsl.query import (
+    Bool,
+    Term,
+    Wildcard,
+    MatchPhrase,
+    MatchPhrasePrefix,
+    Range,
+    Terms,
+    Exists,
+)
 
 from graphene_django.elasticsearch.tests.commons import filter_generation
-from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED
+from graphene_django.utils import (
+    DJANGO_FILTER_INSTALLED,
+    DJANGO_ELASTICSEARCH_DSL_INSTALLED,
+)
 
 pytestmark = []
 
@@ -19,40 +31,40 @@ pytestmark.append(pytest.mark.django_db)
 def test_processor_term():
     filter_generation(
         "articlesInMetaDict",
-        "headlineTerm: \"A text\"",
-        Bool(must=[Term(headline='A text')]),
+        'headlineTerm: "A text"',
+        Bool(must=[Term(headline="A text")]),
     )
 
 
 def test_processor_regex():
     filter_generation(
         "articlesInMetaDict",
-        "headlineRegex: \"A text\"",
-        Bool(must=[Wildcard(headline='A text')]),
+        'headlineRegex: "A text"',
+        Bool(must=[Wildcard(headline="A text")]),
    )
 
 
 def test_processor_phrase():
     filter_generation(
         "articlesInMetaDict",
-        "headlinePhrase: \"A text\"",
-        Bool(must=[MatchPhrase(headline={'query': 'A text'})]),
+        'headlinePhrase: "A text"',
+        Bool(must=[MatchPhrase(headline={"query": "A text"})]),
    )
 
 
 def test_processor_prefix():
     filter_generation(
         "articlesInMetaDict",
-        "headlinePrefix: \"A text\"",
-        Bool(must=[MatchPhrasePrefix(headline={'query': 'A text'})]),
+        'headlinePrefix: "A text"',
+        Bool(must=[MatchPhrasePrefix(headline={"query": "A text"})]),
    )
 
 
 def test_processor_in():
     filter_generation(
         "articlesInMetaDict",
-        "headlineIn: [\"A text 1\", \"A text 2\"]",
-        Bool(must=[Terms(headline=['A text 1', 'A text 2'])]),
+        'headlineIn: ["A text 1", "A text 2"]',
+        Bool(must=[Terms(headline=["A text 1", "A text 2"])]),
    )
 
 
@@ -60,21 +72,21 @@ def test_processor_exits():
     filter_generation(
         "articlesInMetaDict",
         "headlineExits: true",
-        Bool(must=[Bool(must=[Exists(field='headline')])]),
+        Bool(must=[Bool(must=[Exists(field="headline")])]),
    )
 
 
 def test_processor_lte():
     filter_generation(
         "articlesInMetaDict",
-        "headlineLte: \"A text\"",
-        Bool(must=Range(headline={'lte': 'A text'})),
+        'headlineLte: "A text"',
+        Bool(must=Range(headline={"lte": "A text"})),
    )
 
 
 def test_processor_gte():
     filter_generation(
         "articlesInMetaDict",
-        "headlineGte: \"A text\"",
-        Bool(must=Range(headline={'gte': 'A text'})),
+        'headlineGte: "A text"',
+        Bool(must=Range(headline={"gte": "A text"})),
    )