diff --git a/graphene_django/elasticsearch/filter/fields.py b/graphene_django/elasticsearch/filter/fields.py
index dfce2c8..cd21fd0 100644
--- a/graphene_django/elasticsearch/filter/fields.py
+++ b/graphene_django/elasticsearch/filter/fields.py
@@ -1,13 +1,13 @@
-from elasticsearch_dsl.query import Query
-
-from graphene_django.elasticsearch.filter.bridges import ManagerBridge
+from graphene_django.elasticsearch.filter.proxy import ManagerProxy
 from graphene_django.filter import DjangoFilterConnectionField


 class DjangoESFilterConnectionField(DjangoFilterConnectionField):
     """A Field to replace DjangoFilterConnectionField manager by QuerysetBridge"""

     def __init__(self, object_type, *args, **kwargs):
-        """Validating field allowed for this connection"""
+        """Validating field allowed for this connection
+        :param object_type: DjangoObjectType
+        """
         fields = kwargs.get('fields', None)
         if fields is not None:
             raise ValueError('DjangoESFilterConnectionField do not permit argument fields yet.')
@@ -22,14 +22,8 @@ class DjangoESFilterConnectionField(DjangoFilterConnectionField):

         super(DjangoESFilterConnectionField, self).__init__(object_type, *args, **kwargs)

-        self.manager = ManagerBridge(search_manager=self.filterset_class._meta.index.search)
+        self.manager = ManagerProxy(search_manager=self.filterset_class._meta.index.search)

     def get_manager(self):
         """Returning a ManagerBridge to replace the direct use over the Model manager"""
         return self.manager
-
-    def merge_querysets(cls, default_queryset, queryset):
-        """Merge ES queries"""
-        if isinstance(default_queryset, Query):
-            return default_queryset & queryset
-        return default_queryset.query(queryset)
diff --git a/graphene_django/elasticsearch/filter/filters.py b/graphene_django/elasticsearch/filter/filters.py
index fc34c0c..4044a99 100644
--- a/graphene_django/elasticsearch/filter/filters.py
+++ b/graphene_django/elasticsearch/filter/filters.py
@@ -11,8 +11,11 @@ class FilterES(object):
     def __init__(self, field_name, field_name_es=None, lookup_expressions=None,
                  default_processor=None, argument=None):
         """
-        :param name: Name of the field. This is the name that will be exported.
-        :param attr: Path to the index attr that will be used as filter.
+        :param field_name: Name of the field. This is the name that will be exported.
+        :param field_name_es: Path to the index attr that will be used as filter.
+        :param lookup_expressions: List of processors to apply as lookup expressions.
+        :param default_processor: Processor used by default when lookup_expressions is empty.
+        :param argument: Graphene base type for this field.
""" self.field_name = field_name @@ -36,10 +39,15 @@ class FilterES(object): else: self.processor = self.build_processor(self.default_processor) - self.fields = self.processor.generate_field() self.argument = argument or self.default_argument + self.fields = self.processor.generate_field() def build_processor(self, variant): + """ + Create a new processor based on the name + :param variant: Processor name + :return: Returns a Processor instance + """ processor_class = PROCESSORS[variant] return processor_class(self, self.processor) @@ -51,13 +59,6 @@ class FilterES(object): """ return self.processor.generate_es_query(arguments) - def Argument(self): - """ - Defining graphene Argument type for this filter - :return: A Argument type - """ - return self.argument.Argument() - class StringFilterES(FilterES): """String Fields specific to ElasticSearch.""" diff --git a/graphene_django/elasticsearch/filter/filterset.py b/graphene_django/elasticsearch/filter/filterset.py index 94199b7..012bc1c 100644 --- a/graphene_django/elasticsearch/filter/filterset.py +++ b/graphene_django/elasticsearch/filter/filterset.py @@ -20,7 +20,7 @@ FILTER_FOR_ESFIELD_DEFAULTS = { IntegerField: {'filter_class': NumberFilterES}, FloatField: {'filter_class': NumberFilterES, 'extra': { - 'argument': Int() + 'argument': Float() }}, LongField: {'filter_class': NumberFilterES, 'extra': { @@ -32,7 +32,7 @@ FILTER_FOR_ESFIELD_DEFAULTS = { }}, DoubleField: {'filter_class': NumberFilterES, 'extra': { - 'argument': Float() + 'argument': Int() }}, DateField: {'filter_class': StringFilterES}, KeywordField: {'filter_class': StringFilterES}, @@ -169,7 +169,7 @@ class FilterSetESMetaclass(type): # recollecting registered graphene fields base_filters = OrderedDict() - for name, filter_field in six.iteritems(declared_filters): + for filter_name, filter_field in six.iteritems(declared_filters): base_filters.update(filter_field.fields) # adding sort field @@ -212,13 +212,13 @@ class FilterSetESMetaclass(type): def get_meta_filters(mcs, meta): """ Get filters from Meta configuration + :param meta: A FilterSetESOptions instance with meta options :return: Field extracted from index and from the FilterSetES. 
""" index_fields = mcs.get_index_fields(meta) meta_filters = OrderedDict() for name, index_field, data in index_fields: - filter_class = mcs.get_filter_exp(name, index_field, data) meta_filters.update({name: filter_class}) @@ -228,6 +228,7 @@ class FilterSetESMetaclass(type): def get_index_fields(mcs, meta): """ Get fields from index that appears in the meta class configuration of the filter_set + :param meta: A FilterSetESOptions instance with meta options :return: Tuple of (name, field, lookup_expr) describing name of the field, ES class of the field and lookup_expr """ index_fields = meta.index._doc_type._fields() @@ -247,7 +248,12 @@ class FilterSetESMetaclass(type): @classmethod def get_filter_object(mcs, name, field, data): - """Get filters from ObjectField""" + """ + Get filters from ObjectField + :param name: name of the field + :param field: ES index field + :param data: lookup_expr + """ index_fields = [] properties = field._doc_class._doc_type.mapping.properties._params.get('properties', {}) @@ -264,7 +270,13 @@ class FilterSetESMetaclass(type): @classmethod def get_filter_exp(mcs, name, field, data=None, root=None): - """Initialize filter""" + """ + Initialize filter + :param name: name of the field + :param field: ES index field + :param data: lookup_expr + :param root: root name + """ field_data = try_dbfield(FILTER_FOR_ESFIELD_DEFAULTS.get, field.__class__) or {} filter_class = field_data.get('filter_class') @@ -274,15 +286,18 @@ class FilterSetESMetaclass(type): # Get lookup_expr from configuration if data and 'lookup_expressions' in data: kwargs['lookup_expressions'] = set(data['lookup_expressions']) - elif 'lookup_expressions' in kwargs: - kwargs['lookup_expressions'] = set(kwargs['lookup_expressions']) kwargs['field_name'], kwargs['field_name_es'] = mcs.get_name(name, root, data) return filter_class(**kwargs) @staticmethod def get_name(name, root, data): - """Get names of the field and the path to resolve it""" + """ + Get names of the field and the path to resolve it + :param name: name of the field + :param data: lookup_expr + :param root: root name + """ field_name = data.get('field_name', None) if data else None field_name_es = data.get('field_name_es', None) if data else None if not field_name: @@ -297,6 +312,8 @@ class FilterSetESMetaclass(type): Create enum to sort by fields. 
        As graphene is typed, it is necessary generate a Enum by Field
         to have inside, the document fields allowed to be ordered
+        :param name: name of the field
+        :param sort_fields: Fields allowed to be ordered
         """
         sort_enum_name = "{}SortFields".format(name)
@@ -325,10 +342,11 @@ class FilterSetESMetaclass(type):

     @staticmethod
     def generate_sort_field(order_by):
-        """To normalize the sort field data"""
-        if not order_by:
-            sort_fields = {}
-        elif isinstance(order_by, dict):
+        """
+        Normalize the sort field data
+        :param order_by: Sort data
+        """
+        if isinstance(order_by, dict):
             sort_fields = order_by.copy()
         else:
             sort_fields = {field: field for field in order_by}
diff --git a/graphene_django/elasticsearch/filter/processors.py b/graphene_django/elasticsearch/filter/processors.py
index ecd037f..7180f15 100644
--- a/graphene_django/elasticsearch/filter/processors.py
+++ b/graphene_django/elasticsearch/filter/processors.py
@@ -1,7 +1,7 @@
 from collections import OrderedDict
 from elasticsearch_dsl import Q

-from graphene import List
+from graphene import List, Boolean


 class Processor(object):
@@ -10,8 +10,8 @@ class Processor(object):
     def __init__(self, filter_es, parent_processor=None):
         """
         Abstract processor to generate graphene field and ES query to lookups
-        :type filter_es: graphene_django.elasticsearch.filter.filterset.FilterES
-        :type parent_processor: graphene_django.elasticsearch.filter.filterset.Processor
+        :param filter_es: A FilterES target
+        :param parent_processor: Next Processor in the generate-field chain
         """
         self.filter_es = filter_es
         self.parent_processor = parent_processor
@@ -30,10 +30,14 @@ class Processor(object):
         return self_field

     def get_type(self):
+        """Define the argument for the graphene field"""
         return self.filter_es.argument

     def generate_es_query(self, data):
-
+        """
+        Generate the Elasticsearch query for this filter
+        :param data: Data passed to the field in the query
+        """
         if self.variant_name in data:
             value = data.get(self.variant_name)
             self_query = self._build_query(value)
@@ -49,11 +53,19 @@ class Processor(object):
         return self_query

     def _build_field(self):
+        """
+        Specific detail about field creation, to be overridden if necessary.
+        :return: A field
+        """
         variant_name = self.variant_name

-        return OrderedDict({variant_name: self.filter_es})
+        return OrderedDict({variant_name: self.get_type()})

     def _get_variant_name(self):
+        """
+        Make a variant name based on the filter name and processor suffix
+        :return: A variant name
+        """
         if self.suffix_expr == self.filter_es.default_filter_processor:
             variant_name = self.filter_es.field_name
@@ -63,6 +75,11 @@ class Processor(object):
         return variant_name

     def _build_query(self, value):
+        """
+        Build a query based on the specific processor query
+        :param value: Value passed to this processor
+        :return: An Elasticsearch Query
+        """
         result = len(self.filter_es.field_name_es)

         if result > 1:
@@ -73,18 +90,32 @@ class Processor(object):

     @staticmethod
     def _get_query(name, value):
+        """
+        Specific detail about query creation, to be overridden if necessary.
+        :param name: elasticsearch document field name
+        :param value: Value passed to this processor
+        :return: An Elasticsearch Query
+        """
         return Q('term', **{name: value})


 class TermProcessor(Processor):
+    """Has the same behavior as the parent; it exists only as a semantic alias"""
     pass


 class ContainsProcessor(Processor):
+    """Fuzzy search"""
     suffix_expr = 'contains'

     @staticmethod
     def _get_query(name, value):
+        """
+        Override query creation
+        :param name: elasticsearch document field name
+        :param value: Value passed to this processor
+        :return: An Elasticsearch Query
+        """
         return Q('match', **{name: {
             "query": value,
@@ -93,18 +124,32 @@ class RegexProcessor(Processor):
+    """Search based on regular expressions"""
     suffix_expr = 'regex'

     @staticmethod
     def _get_query(name, value):
+        """
+        Override query creation
+        :param name: elasticsearch document field name
+        :param value: Value passed to this processor
+        :return: An Elasticsearch Query
+        """
         return Q('wildcard', **{name: value})


 class PhraseProcessor(Processor):
+    """Search by the union of many terms"""
     suffix_expr = 'phrase'

     @staticmethod
     def _get_query(name, value):
+        """
+        Override query creation
+        :param name: elasticsearch document field name
+        :param value: Value passed to this processor
+        :return: An Elasticsearch Query
+        """
         return Q('match_phrase', **{name: {
             "query": value
@@ -112,10 +157,17 @@ class PrefixProcessor(Processor):
+    """Search by the prefix of the terms"""
     suffix_expr = 'prefix'

     @staticmethod
     def _get_query(name, value):
+        """
+        Override query creation
+        :param name: elasticsearch document field name
+        :param value: Value passed to this processor
+        :return: An Elasticsearch Query
+        """
         return Q('match_phrase_prefix', **{name: {
             "query": value
@@ -123,36 +175,72 @@ class InProcessor(Processor):
+    """Search by many values for a field"""
     suffix_expr = 'in'

+    @staticmethod
+    def _get_query(name, value):
+        """
+        Override query creation
+        :param name: elasticsearch document field name
+        :param value: Value passed to this processor
+        :return: An Elasticsearch Query
+        """
+        return Q('terms', **{name: value})
+
     def get_type(self):
+        """Change the base argument to a list of the base argument"""
         return List(self.filter_es.argument.Argument().type)


 class ExitsProcessor(Processor):
+    """Search by whether the field is present in the document"""
     suffix_expr = 'exits'

     @staticmethod
     def _get_query(name, value):
+        """
+        Override query creation
+        :param name: elasticsearch document field name
+        :param value: Value passed to this processor
+        :return: An Elasticsearch Query
+        """
         return Q('bool', **{
             'must' if value else 'must_not': {'exists': {'field': name}}
         })

+    def get_type(self):
+        """Change the base argument to Boolean"""
+        return Boolean()


 class LteProcessor(Processor):
+    """Search by range: less than or equal to the value"""
     suffix_expr = 'lte'

     @staticmethod
     def _get_query(name, value):
-        return Q("bool", must={'range': {name: {'lte': value}}})
+        """
+        Override query creation
+        :param name: elasticsearch document field name
+        :param value: Value passed to this processor
+        :return: An Elasticsearch Query
+        """
+        return Q('range', **{name: {'lte': value}})


 class GteProcessor(Processor):
+    """Search by range: greater than or equal to the value"""
     suffix_expr = 'gte'

     @staticmethod
     def _get_query(name, value):
-        return Q("bool", must={'range': {name: {'gte': value}}})
+        """
+        Override query creation
+        :param name: elasticsearch document field name
+        :param value: Value passed to this processor
+        :return: An Elasticsearch Query
+        """
+        return Q('range', **{name: {'gte': value}})
Q("range", **{name: {'gte': value}}) PROCESSORS = { @@ -162,6 +250,7 @@ PROCESSORS = { "phrase": PhraseProcessor, "prefix": PrefixProcessor, "in": InProcessor, + "exits": ExitsProcessor, "lte": LteProcessor, "gte": GteProcessor, } diff --git a/graphene_django/elasticsearch/filter/bridges.py b/graphene_django/elasticsearch/filter/proxy.py similarity index 89% rename from graphene_django/elasticsearch/filter/bridges.py rename to graphene_django/elasticsearch/filter/proxy.py index a987712..f253d2a 100644 --- a/graphene_django/elasticsearch/filter/bridges.py +++ b/graphene_django/elasticsearch/filter/proxy.py @@ -1,5 +1,5 @@ -class QuerysetBridge(object): +class QuerysetProxy(object): """Bridge to Queryset through ES query""" def __init__(self, search): @@ -21,7 +21,7 @@ class QuerysetBridge(object): return _slice.to_queryset() -class ManagerBridge(object): +class ManagerProxy(object): """Bridge to Queryset through ES query""" def __init__(self, search_manager): @@ -30,4 +30,4 @@ class ManagerBridge(object): def get_queryset(self): """Returning self as Queryset to be the bridge""" - return QuerysetBridge(search=self.search_manager()) + return QuerysetProxy(search=self.search_manager()) diff --git a/graphene_django/elasticsearch/tests/filters.py b/graphene_django/elasticsearch/tests/filters.py index 1a01e8d..ca603b8 100644 --- a/graphene_django/elasticsearch/tests/filters.py +++ b/graphene_django/elasticsearch/tests/filters.py @@ -17,7 +17,12 @@ class ArticleDocument(DocType): """Metaclass config""" model = Article fields = [ + 'id', 'headline', + 'pub_date', + 'pub_date_time', + 'lang', + 'importance', ] @@ -37,7 +42,8 @@ class ArticleFilterESInMeta(FilterSetES): class Meta(object): """Metaclass data""" index = ArticleDocument - includes = ['headline'] + includes = ['id', 'headline'] + order_by = {'id': 'es_id'} class ArticleFilterESInMetaDict(FilterSetES): @@ -47,7 +53,17 @@ class ArticleFilterESInMetaDict(FilterSetES): index = ArticleDocument includes = { 'headline': { - 'lookup_expressions': ['term', 'contains'] + 'lookup_expressions': [ + 'term', + 'contains', + 'regex', + 'phrase', + 'prefix', + 'in', + 'exits', + 'lte', + 'gte', + ] } } @@ -66,6 +82,22 @@ class ArticleFilterMultiField(FilterSetES): ) +class ArticleFilterGenerateAll(FilterSetES): + """Article Filter for ES""" + class Meta(object): + """Metaclass data""" + index = ArticleDocument + excludes = [] + + +class ArticleFilterExcludes(FilterSetES): + """Article Filter for ES""" + class Meta(object): + """Metaclass data""" + index = ArticleDocument + excludes = ['headline'] + + class ESFilterQuery(ObjectType): """A query for ES fields""" articles_as_field = DjangoESFilterConnectionField( @@ -80,3 +112,9 @@ class ESFilterQuery(ObjectType): articles_in_multi_field = DjangoESFilterConnectionField( ArticleNode, filterset_class=ArticleFilterMultiField ) + articles_in_generate_all = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterGenerateAll + ) + articles_in_excludes = DjangoESFilterConnectionField( + ArticleNode, filterset_class=ArticleFilterExcludes + ) diff --git a/graphene_django/elasticsearch/tests/test_fields.py b/graphene_django/elasticsearch/tests/test_fields.py index dc30d00..c7752a0 100644 --- a/graphene_django/elasticsearch/tests/test_fields.py +++ b/graphene_django/elasticsearch/tests/test_fields.py @@ -1,11 +1,15 @@ from datetime import datetime import pytest +from py.test import raises from mock import mock -from graphene import Schema +from elasticsearch_dsl.query import Bool, Match, Term, 
+from graphene import Schema, ObjectType

-from graphene_django.elasticsearch.filter import filters
+from graphene_django.elasticsearch.filter.fields import DjangoESFilterConnectionField
+from graphene_django.elasticsearch.filter.filterset import FilterSetES
+from graphene_django.filter.tests.test_fields import ArticleNode
 from graphene_django.tests.models import Article, Reporter
 from graphene_django.utils import DJANGO_FILTER_INSTALLED, DJANGO_ELASTICSEARCH_DSL_INSTALLED
 from graphene_django.elasticsearch.tests.filters import ESFilterQuery, ArticleDocument
@@ -41,9 +45,7 @@ def fake_data():
     return a1, a2


-def filter_generation(field, query_str, expected_arguments, method_to_mock="query"):
-    a1, a2 = fake_data()
-
+def generate_query(field, query_str):
     query = """
     query {
         %s(%s) {
             edges {
                 node {
                     headline
                 }
             }
         }
     }
     """ % (field, query_str)
+    return query
+
+
+def filter_generation(field, query_str, expected_arguments, method_to_mock="query"):
+    a1, a2 = fake_data()
+
+    query = generate_query(field, query_str)

     mock_count = mock.Mock(return_value=3)
     mock_slice = mock.Mock(return_value=mock.Mock(to_queryset=mock.Mock(
     )))
     mock_query = mock.Mock(return_value=ArticleDocument.search())

-    with mock.patch('django_elasticsearch_dsl.search.Search.count', mock_count),\
-        mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice),\
+    with mock.patch('django_elasticsearch_dsl.search.Search.count', mock_count), \
+            mock.patch('django_elasticsearch_dsl.search.Search.__getitem__', mock_slice), \
         mock.patch("elasticsearch_dsl.Search.%s" % method_to_mock, mock_query):
-
         schema = Schema(query=ESFilterQuery)
         result = schema.execute(query)
@@ -82,7 +90,7 @@ def test_filter_string():
     filter_generation(
         "articlesAsField",
         "headline: \"A text\"",
-        filters.StringFilterES(field_name='headline').generate_es_query({"headline": "A text"}),
+        Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]),
     )
@@ -90,7 +98,7 @@ def test_filter_string_date():
     filter_generation(
         "articlesAsField",
         "headline: \"A text\"",
-        filters.StringFilterES(field_name='headline').generate_es_query({"headline": "A text"}),
+        Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]),
     )
@@ -103,11 +111,20 @@ def test_filter_as_field_order_by():
     )


+def test_filter_as_field_order_by_dict():
+    filter_generation(
+        "articlesInMeta",
+        "headline: \"A text\", sort:{order:desc, field:id}",
+        {'es_id': {'order': 'desc'}},
+        "sort"
+    )
+
+
 def test_filter_in_meta():
     filter_generation(
         "articlesInMeta",
         "headline: \"A text\"",
-        filters.StringFilterES(field_name='headline').generate_es_query({"headline": "A text"}),
+        Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]),
     )
@@ -115,7 +132,7 @@ def test_filter_in_meta_dict():
     filter_generation(
         "articlesInMetaDict",
         "headline: \"A text\"",
-        filters.StringFilterES(field_name='headline').generate_es_query({"headline": "A text"}),
+        Bool(must=[Match(headline={'query': 'A text', 'fuzziness': 'auto'})]),
     )
@@ -123,8 +140,178 @@ def test_filter_in_multi_field():
     filter_generation(
         "articlesInMultiField",
         "contain: \"A text\"",
-        filters.StringFilterES(
-            field_name='contain',
-            field_name_es=['headline', 'lang'],
-        ).generate_es_query({"contain": "A text"}),
+        Bool(must=[Bool(should=[
+            Match(headline={'query': 'A text', 'fuzziness': 'auto'}),
+            Match(lang={'query': 'A text', 'fuzziness': 'auto'})
+        ])]),
+    )
+
+
+def test_filter_generating_all():
+    filter_generation(
+        "articlesInGenerateAll",
+        "headline: \"A text\", "
+        "pubDate: \"0000-00-00\", "
+        "pubDateTime: \"00:00:00\", "
+        "lang: \"es\", "
+        "importance: 1, ",
+        Bool(must=[
+            Match(headline={'query': 'A text', 'fuzziness': 'auto'}),
+            Match(pub_date={'query': '0000-00-00', 'fuzziness': 'auto'}),
+            Match(pub_date_time={'query': '00:00:00', 'fuzziness': 'auto'}),
+            Match(lang={'query': 'es', 'fuzziness': 'auto'}),
+            Term(importance=1)
+        ]),
+    )
+
+
+def test_filter_generating_exclude():
+    query = generate_query("articlesInExcludes", "headline: \"A text\", ")
+
+    schema = Schema(query=ESFilterQuery)
+    result = schema.execute(query)
+
+    assert len(result.errors) > 0
+
+
+def test_filter_bad_processor():
+    class ArticleFilterBadProcessor(FilterSetES):
+        """Article Filter for ES"""
+
+        class Meta(object):
+            """Metaclass data"""
+            index = ArticleDocument
+            includes = {
+                'headline': {
+                    'lookup_expressions': ['bad_processor']
+                }
+            }
+
+    with raises(ValueError) as error_info:
+        DjangoESFilterConnectionField(
+            ArticleNode, filterset_class=ArticleFilterBadProcessor
+        )
+
+    assert "bad_processor" in str(error_info.value)
+
+
+def test_filter_field_without_filterset_class():
+    with raises(ValueError) as error_info:
+        DjangoESFilterConnectionField(
+            ArticleNode
+        )
+
+    assert "filterset_class" in str(error_info.value)
+
+
+def test_filter_field_with_fields():
+    with raises(ValueError) as error_info:
+        DjangoESFilterConnectionField(
+            ArticleNode, fields=['headline']
+        )
+
+    assert "fields" in str(error_info.value)
+
+
+def test_filter_field_with_order_by():
+    with raises(ValueError) as error_info:
+        DjangoESFilterConnectionField(
+            ArticleNode, order_by=['headline']
+        )
+
+    assert "order_by" in str(error_info.value)
+
+
+def test_filter_filterset_without_index():
+    with raises(ValueError) as error_info:
+        class ArticleFilterBadProcessor(FilterSetES):
+            """Article Filter for ES"""
+
+            class Meta(object):
+                """Metaclass data"""
+
+        DjangoESFilterConnectionField(
+            ArticleNode, filterset_class=ArticleFilterBadProcessor
+        )
+
+    assert "Index in Meta" in str(error_info.value)
+
+
+def test_filter_filterset_without_xcludes():
+    with raises(ValueError) as error_info:
+        class ArticleFilterBadProcessor(FilterSetES):
+            """Article Filter for ES"""

+            class Meta(object):
+                """Metaclass data"""
+                index = ArticleDocument
+
+        DjangoESFilterConnectionField(
+            ArticleNode, filterset_class=ArticleFilterBadProcessor
+        )
+
+    assert "includes or excludes field in Meta" in str(error_info.value)
+
+
+def test_processor_term():
+    filter_generation(
+        "articlesInMetaDict",
+        "headlineTerm: \"A text\"",
+        Bool(must=[Term(headline='A text')]),
+    )
+
+
+def test_processor_regex():
+    filter_generation(
+        "articlesInMetaDict",
+        "headlineRegex: \"A text\"",
+        Bool(must=[Wildcard(headline='A text')]),
+    )
+
+
+def test_processor_phrase():
+    filter_generation(
+        "articlesInMetaDict",
+        "headlinePhrase: \"A text\"",
+        Bool(must=[MatchPhrase(headline={'query': 'A text'})]),
+    )
+
+
+def test_processor_prefix():
+    filter_generation(
+        "articlesInMetaDict",
+        "headlinePrefix: \"A text\"",
+        Bool(must=[MatchPhrasePrefix(headline={'query': 'A text'})]),
+    )
+
+
+def test_processor_in():
+    filter_generation(
+        "articlesInMetaDict",
+        "headlineIn: [\"A text 1\", \"A text 2\"]",
+        Bool(must=[Terms(headline=['A text 1', 'A text 2'])]),
+    )
+
+
+def test_processor_exits():
+    filter_generation(
"articlesInMetaDict", + "headlineExits: true", + Bool(must=[Bool(must=[Exists(field='headline')])]), + ) + + +def test_processor_lte(): + filter_generation( + "articlesInMetaDict", + "headlineLte: \"A text\"", + Bool(must=Range(headline={'lte': 'A text'})), + ) + + +def test_processor_gte(): + filter_generation( + "articlesInMetaDict", + "headlineGte: \"A text\"", + Bool(must=Range(headline={'gte': 'A text'})), )