from ._tokenizer_exceptions_list import PL_BASE_EXCEPTIONS
from ...symbols import POS, ADV, NOUN, ORTH, LEMMA, ADJ

_exc = {}

# Abbreviations that carry an expanded lemma and a coarse POS tag
for exc_data in [
    {ORTH: "m.in.", LEMMA: "między innymi", POS: ADV},
    {ORTH: "inż.", LEMMA: "inżynier", POS: NOUN},
    {ORTH: "mgr.", LEMMA: "magister", POS: NOUN},
    {ORTH: "tzn.", LEMMA: "to znaczy", POS: ADV},
    {ORTH: "tj.", LEMMA: "to jest", POS: ADV},
    {ORTH: "tzw.", LEMMA: "tak zwany", POS: ADJ},
]:
    _exc[exc_data[ORTH]] = [exc_data]

# Abbreviations kept as single tokens without additional attributes
for orth in ["w.", "r."]:
    _exc[orth] = [{ORTH: orth}]

# Merge in the long list of base Polish exceptions
for orth in PL_BASE_EXCEPTIONS:
    _exc[orth] = [{ORTH: orth}]

TOKENIZER_EXCEPTIONS = _exc
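# Illustrative usage sketch (not part of the original module; assumes spaCy is
# installed so the Polish language class can be imported). Each entry in
# TOKENIZER_EXCEPTIONS maps a surface form (ORTH) to a list of token dicts,
# so the tokenizer keeps strings such as "m.in." or "tzw." as single tokens
# instead of splitting them at the periods:
#
#     from spacy.lang.pl import Polish
#
#     nlp = Polish()
#     doc = nlp("Zmiany dotyczą m.in. tzw. wyjątków tokenizacji.")
#     assert "m.in." in [t.text for t in doc]
#     assert "tzw." in [t.text for t in doc]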