mirror of https://github.com/explosion/spaCy.git
Avoid loading all French exceptions on import
Move the exceptions loading behind a get_tokenizer_exceptions() function for French, instead of loading them into the top-level namespace. This cuts import time from 0.6s to 0.2s, at the expense of making the French data a little different from the other languages (there's no top-level TOKENIZER_EXCEPTIONS variable). The current solution feels somewhat unsatisfying.
This commit is contained in:
parent 7c1260e98c
commit 26446aa728
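The pattern behind the change is the same across all three hunks below: instead of building the large French exceptions dict as a module-level TOKENIZER_EXCEPTIONS constant at import time, the dict is built inside get_tokenizer_exceptions() and only evaluated when a tokenizer is actually created. A minimal sketch of that pattern, with hypothetical, heavily abridged entries (the real spacy/fr module generates thousands of them, which is the work being deferred; the entry schema is simplified here):

# Before: a module-level constant, built as soon as the module is imported.
# After: the same data behind a function, built only when it is requested.

def get_tokenizer_exceptions():
    # Abridged, hypothetical entries; the real spacy/fr data is much larger
    # and largely generated programmatically (elisions, hyphenated forms, ...).
    exceptions = {
        "aujourd'hui": [{"ORTH": "aujourd'hui"}],
        "qqch.": [{"ORTH": "qqch."}],
    }
    return exceptions

The trade-off noted in the commit message follows directly: callers now import a function rather than a ready-made TOKENIZER_EXCEPTIONS constant, which is what the __all__ changes in the diff reflect.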
@@ -19,7 +19,7 @@ class FrenchDefaults(BaseDefaults):

     @classmethod
     def create_tokenizer(cls, nlp=None):
-        cls.tokenizer_exceptions = TOKENIZER_EXCEPTIONS
+        cls.tokenizer_exceptions = get_tokenizer_exceptions()
         return super(FrenchDefaults, cls).create_tokenizer(nlp)
@@ -2,10 +2,10 @@
 from __future__ import unicode_literals

 from .stop_words import STOP_WORDS
-from .tokenizer_exceptions import TOKENIZER_EXCEPTIONS, TOKEN_MATCH
+from .tokenizer_exceptions import get_tokenizer_exceptions, TOKEN_MATCH


 STOP_WORDS = set(STOP_WORDS)


-__all__ = ["STOP_WORDS", "TOKENIZER_EXCEPTIONS", "TOKEN_MATCH"]
+__all__ = ["STOP_WORDS", "get_tokenizer_exceptions", "TOKEN_MATCH"]
@@ -217,6 +217,6 @@ REGULAR_EXP.append(_URL_PATTERN)
 TOKEN_MATCH = re.compile('|'.join('(?:{})'.format(m) for m in REGULAR_EXP), re.IGNORECASE).match

-TOKENIZER_EXCEPTIONS = get_tokenizer_exceptions()
+#TOKENIZER_EXCEPTIONS = get_tokenizer_exceptions()

-__all__ = ["TOKENIZER_EXCEPTIONS", "TOKEN_MATCH"]
+__all__ = ["get_tokenizer_exceptions", "TOKEN_MATCH"]
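The "0.6s to 0.2s" figure in the commit message is an import-time measurement. A rough way to check it is a timing like the sketch below (assuming the spacy.fr module path of this era; exact numbers vary by machine and spaCy version):

import time

start = time.time()
import spacy.fr   # the French exceptions are no longer built during this import
print("import spacy.fr took %.2fs" % (time.time() - start))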