From 63f5651f8d92665810c672cf923a95e2eccb0f80 Mon Sep 17 00:00:00 2001
From: Matthew Honnibal
Date: Fri, 6 Jul 2018 12:32:11 +0200
Subject: [PATCH] Fix tokenizer serialization

---
 spacy/tokenizer.pyx | 8 ++++----
 1 file changed, 4 insertions(+), 4 deletions(-)

diff --git a/spacy/tokenizer.pyx b/spacy/tokenizer.pyx
index 65c31fb53..8c389fb7e 100644
--- a/spacy/tokenizer.pyx
+++ b/spacy/tokenizer.pyx
@@ -375,10 +375,10 @@ cdef class Tokenizer:
         """
         serializers = OrderedDict((
             ('vocab', lambda: self.vocab.to_bytes()),
-            ('prefix_search', _get_regex_pattern(self.prefix_search)),
-            ('suffix_search', _get_regex_pattern(self.suffix_search)),
-            ('infix_finditer', _get_regex_pattern(self.infix_finditer)),
-            ('token_match', _get_regex_pattern(self.token_match)),
+            ('prefix_search', lambda: _get_regex_pattern(self.prefix_search)),
+            ('suffix_search', lambda: _get_regex_pattern(self.suffix_search)),
+            ('infix_finditer', lambda: _get_regex_pattern(self.infix_finditer)),
+            ('token_match', lambda: _get_regex_pattern(self.token_match)),
             ('exceptions', lambda: OrderedDict(sorted(self._rules.items())))
         ))
         return util.to_bytes(serializers, exclude)
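
Note on the change: the patch wraps the four regex-pattern entries in lambdas so they match
the other serializers ('vocab', 'exceptions'), which are already zero-argument callables that
util.to_bytes invokes at serialization time. Storing the already-computed pattern instead of a
callable breaks that contract. Below is a minimal sketch of the contract, not spaCy's actual
implementation; to_bytes_sketch and pattern are hypothetical stand-ins for spacy.util.to_bytes
and _get_regex_pattern(self.token_match).

    # Sketch: every value in the serializers dict must be a zero-argument
    # callable, because the serializer helper calls it lazily.
    from collections import OrderedDict


    def to_bytes_sketch(getters, exclude=tuple()):
        # Hypothetical stand-in for spacy.util.to_bytes: call each getter
        # at serialization time, skipping excluded keys.
        return OrderedDict(
            (key, getter())
            for key, getter in getters.items()
            if key not in exclude
        )


    pattern = r"\w+"  # stand-in for a tokenizer regex pattern string

    # Before the patch the dict stored the precomputed value, so the helper
    # would try to call a string and raise "'str' object is not callable".
    broken = OrderedDict((("token_match", pattern),))

    # After the patch each entry is a lambda, evaluated only when the
    # helper runs, consistent with the 'vocab' and 'exceptions' entries.
    fixed = OrderedDict((("token_match", lambda: pattern),))

    print(to_bytes_sketch(fixed))  # OrderedDict([('token_match', '\\w+')])
    # to_bytes_sketch(broken) would raise TypeError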