Mirror of https://github.com/explosion/spaCy.git (synced 2024-12-27 10:26:35 +03:00)
Fix customized-tokenizer tests
parent 34c585396a
commit 45029a550e
@@ -9,8 +9,8 @@ import pytest

 @pytest.fixture
 def tokenizer(en_vocab):
-    prefix_re = util.compile_prefix_regex(nlp_model.Defaults.prefixes)
-    suffix_re = util.compile_suffix_regex(nlp_model.Defaults.suffixes)
+    prefix_re = util.compile_prefix_regex(English.Defaults.prefixes)
+    suffix_re = util.compile_suffix_regex(English.Defaults.suffixes)
     custom_infixes = ['\.\.\.+',
                       '(?<=[0-9])-(?=[0-9])',
                       # '(?<=[0-9]+),(?=[0-9]+)',
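For context, here is a minimal, self-contained sketch of the kind of customized tokenizer this fixture builds. It assumes the public spaCy API (spacy.lang.en.English, spacy.tokenizer.Tokenizer, and the compile_*_regex helpers in spacy.util); the function name make_custom_tokenizer and the sample sentence are illustrative, not part of the commit.

# Hedged sketch, not the repository's test code: build a Tokenizer that keeps
# the stock English prefix/suffix rules but swaps in custom infix patterns,
# mirroring what the fixed fixture does with English.Defaults.
from spacy.lang.en import English
from spacy.tokenizer import Tokenizer
from spacy.util import (compile_prefix_regex, compile_suffix_regex,
                        compile_infix_regex)


def make_custom_tokenizer(nlp):
    # Default prefix/suffix patterns shipped with the English language data.
    prefix_re = compile_prefix_regex(English.Defaults.prefixes)
    suffix_re = compile_suffix_regex(English.Defaults.suffixes)
    # Custom infixes: ellipses and hyphens between digits (raw strings avoid
    # the invalid-escape warnings that '\.\.\.+' raises on newer Pythons).
    custom_infixes = [r"\.\.\.+", r"(?<=[0-9])-(?=[0-9])"]
    infix_re = compile_infix_regex(custom_infixes)
    return Tokenizer(nlp.vocab,
                     rules=English.Defaults.tokenizer_exceptions,
                     prefix_search=prefix_re.search,
                     suffix_search=suffix_re.search,
                     infix_finditer=infix_re.finditer)


nlp = English()
nlp.tokenizer = make_custom_tokenizer(nlp)
# The digit-digit hyphen is now split into its own token, e.g. "1991-1995"
# becomes ["1991", "-", "1995"].
print([t.text for t in nlp("spaCy 1991-1995 test")])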