# spaCy/spacy/lang/es/tokenizer_exceptions.py


from ...symbols import ORTH, LEMMA, NORM, PRON_LEMMA
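
# Each key is the exact surface string to treat as a special case; the value
# is the list of sub-token dicts (ORTH is required, LEMMA/NORM are optional
# attribute overrides applied to the resulting tokens).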
_exc = {
    "pal": [{ORTH: "pa", LEMMA: "para"}, {ORTH: "l", LEMMA: "el", NORM: "el"}],
}
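
# Abbreviations that keep their surface form as a single token but carry a
# lemma (and sometimes norm) override.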
for exc_data in [
    {ORTH: "n°", LEMMA: "número"},
    {ORTH: "°C", LEMMA: "grados Celsius"},
    {ORTH: "aprox.", LEMMA: "aproximadamente"},
    {ORTH: "dna.", LEMMA: "docena"},
    {ORTH: "dpto.", LEMMA: "departamento"},
    {ORTH: "ej.", LEMMA: "ejemplo"},
    {ORTH: "esq.", LEMMA: "esquina"},
    {ORTH: "pág.", LEMMA: "página"},
    {ORTH: "p.ej.", LEMMA: "por ejemplo"},
    {ORTH: "Ud.", LEMMA: PRON_LEMMA, NORM: "usted"},
    {ORTH: "Vd.", LEMMA: PRON_LEMMA, NORM: "usted"},
    {ORTH: "Uds.", LEMMA: PRON_LEMMA, NORM: "ustedes"},
    {ORTH: "Vds.", LEMMA: PRON_LEMMA, NORM: "ustedes"},
    {ORTH: "vol.", NORM: "volumen"},
]:
    _exc[exc_data[ORTH]] = [exc_data]
# Times
_exc["12m."] = [{ORTH: "12"}, {ORTH: "m.", LEMMA: "p.m."}]
for h in range(1, 12 + 1):
    for period in ["a.m.", "am"]:
        _exc[f"{h}{period}"] = [{ORTH: f"{h}"}, {ORTH: period, LEMMA: "a.m."}]
    for period in ["p.m.", "pm"]:
        _exc[f"{h}{period}"] = [{ORTH: f"{h}"}, {ORTH: period, LEMMA: "p.m."}]
for orth in [
    "a.C.",
    "a.J.C.",
    "d.C.",
    "d.J.C.",
    "apdo.",
    "Av.",
    "Avda.",
    "Cía.",
    "Dr.",
    "Dra.",
    "EE.UU.",
    "etc.",
    "fig.",
    "Gob.",
    "Gral.",
    "Ing.",
    "J.C.",
    "km/h",
    "Lic.",
    "m.n.",
    "núm.",
    "P.D.",
    "Prof.",
    "Profa.",
    "q.e.p.d.",
    "Q.E.P.D.",
    "S.A.",
    "S.L.",
    "S.R.L.",
    "s.s.s.",
    "Sr.",
    "Sra.",
    "Srta.",
]:
    _exc[orth] = [{ORTH: orth}]
TOKENIZER_EXCEPTIONS = _exc
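

# A minimal usage sketch (assumes spaCy is installed and that this module is
# the exception table wired into the Spanish language class, as in the spaCy
# source tree; spacy.blank("es") builds a tokenizer from it):
if __name__ == "__main__":
    import spacy

    nlp = spacy.blank("es")
    # "aprox." and "dpto." match single-token exceptions above, so they keep
    # their trailing periods; "3pm" splits into "3" + "pm" via the generated
    # time entries.
    doc = nlp("Llegamos aprox. a las 3pm al dpto.")
    print([t.text for t in doc])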