From 6c8d6277337b65ffc879421dd5ac510c741c7fce Mon Sep 17 00:00:00 2001
From: Matthew Honnibal
Date: Fri, 6 Jul 2018 12:36:33 +0200
Subject: [PATCH] Fix tokenizer deserialization

---
 spacy/tokenizer.pyx | 6 +++---
 1 file changed, 3 insertions(+), 3 deletions(-)

diff --git a/spacy/tokenizer.pyx b/spacy/tokenizer.pyx
index 8c389fb7e..8a679bdc4 100644
--- a/spacy/tokenizer.pyx
+++ b/spacy/tokenizer.pyx
@@ -400,11 +400,11 @@ cdef class Tokenizer:
             ('exceptions', lambda b: data.setdefault('rules', b))
         ))
         msg = util.from_bytes(bytes_data, deserializers, exclude)
-        if 'prefix_search' in data:
+        if data.get('prefix_search'):
             self.prefix_search = re.compile(data['prefix_search']).search
-        if 'suffix_search' in data:
+        if data.get('suffix_search'):
             self.suffix_search = re.compile(data['suffix_search']).search
-        if 'infix_finditer' in data:
+        if data.get('infix_finditer'):
             self.infix_finditer = re.compile(data['infix_finditer']).finditer
         if data.get('token_match'):
             self.token_match = re.compile(data['token_match']).search
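
The change swaps a membership test for a truthiness test: 'key' in data is
True even when the serialized message carries an explicit None for that key,
in which case re.compile(None) raises a TypeError during deserialization.
data.get(key) is falsy for both a missing key and a None/empty value, so the
pattern is only compiled when a real pattern string exists. A minimal sketch
of the failure mode the patch avoids, run outside spaCy (the `data` dict here
is hypothetical, standing in for the deserialized message):

    import re

    data = {'prefix_search': None}  # key present, but no pattern was serialized

    # Old check: membership alone lets the None value through, and
    # re.compile(None) raises TypeError.
    try:
        if 'prefix_search' in data:
            prefix_search = re.compile(data['prefix_search']).search
    except TypeError as err:
        print('old check fails:', err)

    # New check: data.get() is falsy for a missing key and for a None or
    # empty value, so the attribute is only set when a pattern exists.
    if data.get('prefix_search'):
        prefix_search = re.compile(data['prefix_search']).search
    else:
        print('new check skips the missing pattern')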