from ..tokenizer_exceptions import BASE_EXCEPTIONS
from ...symbols import ORTH, NORM
from ...util import update_exc


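# Each entry maps a surface form to the list of tokens it should be split into.
# ORTH is the token text; NORM stores a normalised form (e.g. "Ud." -> "usted").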
_exc = {
    "pal": [{ORTH: "pa"}, {ORTH: "l", NORM: "el"}],
}


for exc_data in [
    {ORTH: "n°"},
    {ORTH: "°C"},
    {ORTH: "aprox."},
    {ORTH: "dna."},
    {ORTH: "dpto."},
    {ORTH: "ej."},
    {ORTH: "esq."},
    {ORTH: "pág."},
    {ORTH: "p.ej."},
    {ORTH: "Ud.", NORM: "usted"},
    {ORTH: "Vd.", NORM: "usted"},
    {ORTH: "Uds.", NORM: "ustedes"},
    {ORTH: "Vds.", NORM: "ustedes"},
    {ORTH: "vol.", NORM: "volúmen"},
 | 
						|
]:
    _exc[exc_data[ORTH]] = [exc_data]


# Times

_exc["12m."] = [{ORTH: "12"}, {ORTH: "m."}]

# Hours 1-12 with a.m./p.m. variants, e.g. "11a.m." -> ["11", "a.m."]
for h in range(1, 12 + 1):
    for period in ["a.m.", "am"]:
        _exc[f"{h}{period}"] = [{ORTH: f"{h}"}, {ORTH: period}]
    for period in ["p.m.", "pm"]:
        _exc[f"{h}{period}"] = [{ORTH: f"{h}"}, {ORTH: period}]


# Abbreviations kept as single tokens.
for orth in [
    "a.C.",
    "a.J.C.",
    "d.C.",
    "d.J.C.",
    "apdo.",
    "Av.",
    "Avda.",
    "Cía.",
    "Dr.",
    "Dra.",
    "EE.UU.",
    "Ee.Uu.",
    "EE. UU.",
    "Ee. Uu.",
    "etc.",
    "fig.",
    "Gob.",
    "Gral.",
    "Ing.",
    "J.C.",
    "km/h",
    "Lic.",
    "m.n.",
    "núm.",
    "P.D.",
    "Prof.",
    "Profa.",
    "q.e.p.d.",
    "Q.E.P.D.",
    "S.A.",
    "S.L.",
    "S.R.L.",
    "s.s.s.",
    "Sr.",
    "Sra.",
    "Srta.",
]:
    _exc[orth] = [{ORTH: orth}]


# Merge the language-specific exceptions into the shared base set.
TOKENIZER_EXCEPTIONS = update_exc(BASE_EXCEPTIONS, _exc)
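
# A minimal usage sketch (illustrative only, not part of this module): with a
# blank Spanish pipeline these special cases keep abbreviations together and
# split attached times.
#
#     import spacy
#     nlp = spacy.blank("es")
#     doc = nlp("Llegamos aprox. a las 3p.m. al dpto. del Dr. Ruiz")
#     print([t.text for t in doc])
#     # expected: "aprox.", "dpto." and "Dr." stay whole; "3p.m." becomes "3", "p.m."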