Fix tokenizer test

This commit is contained in:
Ines Montani 2016-12-18 16:55:32 +01:00
parent 753068f1d5
commit d1c1d3f9cd

@@ -8,9 +8,9 @@ import cloudpickle
 import tempfile
 from ... import util
-from ...en.language_data import TOKENIZER_PREFIXES as EN_TOKENIZER_PREFIXES
+from ...language_data import TOKENIZER_PREFIXES
-en_search_prefixes = util.compile_prefix_regex(EN_TOKENIZER_PREFIXES).search
+en_search_prefixes = util.compile_prefix_regex(TOKENIZER_PREFIXES).search
 # @pytest.mark.xfail
 # def test_pickle(en_tokenizer):
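
The change above swaps the English-specific prefix list for the shared one. As a minimal, hypothetical sketch of what the compiled prefix search does (assuming the spaCy 1.x layout of this commit, where the test's relative "..." imports resolve to the top-level spacy package, and an illustrative input string of my own):

# Standalone version of the test setup above; the absolute imports
# mirror what the relative ones in spacy/tests/ resolve to (assumption).
from spacy import util
from spacy.language_data import TOKENIZER_PREFIXES

# compile_prefix_regex combines the prefix patterns into one regex;
# its .search method finds a prefix character (quote, bracket, etc.)
# at the start of a string, which the tokenizer then splits off.
en_search_prefixes = util.compile_prefix_regex(TOKENIZER_PREFIXES).search

match = en_search_prefixes('"Hello')
if match is not None:
    print(match.group())  # expected to print the leading quote: "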