from libcpp.vector cimport vector
from preshed.maps cimport PreshMap
from cymem.cymem cimport Pool

from .typedefs cimport hash_t
from .structs cimport LexemeC, SpanC, TokenC
from .strings cimport StringStore
from .tokens.doc cimport Doc
from .vocab cimport Vocab, LexemesOrTokens, _Cached
from .matcher.phrasematcher cimport PhraseMatcher


cdef class Tokenizer:
    cdef Pool mem
    cdef PreshMap _cache     # hash(substring) -> cached analysis
    cdef PreshMap _specials  # hash(substring) -> special-case analysis
    cdef readonly Vocab vocab

    # Match callables and rules supplied at construction time
    cdef object _token_match
    cdef object _url_match
    cdef object _prefix_search
    cdef object _suffix_search
    cdef object _infix_finditer
    cdef object _rules
    cdef PhraseMatcher _special_matcher
    cdef bint _faster_heuristics

    cdef Doc _tokenize_affixes(self, str string, bint with_special_cases)
    cdef int _apply_special_cases(self, Doc doc) except -1
    cdef void _filter_special_spans(self, vector[SpanC] &original,
                                    vector[SpanC] &filtered, int doc_len) nogil
    cdef object _prepare_special_spans(self, Doc doc,
                                       vector[SpanC] &filtered)
    cdef int _retokenize_special_spans(self, Doc doc, TokenC* tokens,
                                       object span_data)
    cdef int _try_specials_and_cache(self, hash_t key, Doc tokens,
                                     int* has_special,
                                     bint with_special_cases) except -1
    cdef int _tokenize(self, Doc tokens, str span, hash_t key,
                       int* has_special, bint with_special_cases) except -1
    cdef str _split_affixes(self, Pool mem, str string,
                            vector[LexemeC*] *prefixes,
                            vector[LexemeC*] *suffixes, int* has_special,
                            bint with_special_cases)
    cdef int _attach_tokens(self, Doc tokens, str string,
                            vector[LexemeC*] *prefixes,
                            vector[LexemeC*] *suffixes, int* has_special,
                            bint with_special_cases) except -1
    cdef int _save_cached(self, const TokenC* tokens, hash_t key,
                          int* has_special, int n) except -1
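
For orientation: these cdef declarations back spaCy's public Tokenizer API. A call first consults _specials and _cache (_try_specials_and_cache), then splits affixes (_split_affixes, _attach_tokens) and memoizes the result (_save_cached); _apply_special_cases uses _special_matcher to retokenize special cases that only surface after affix splitting. A minimal usage sketch, assuming spaCy is installed (the token output shown is illustrative):

    import re

    import spacy
    from spacy.tokenizer import Tokenizer

    nlp = spacy.blank("en")

    # The default tokenizer exercises the code path declared above.
    doc = nlp.tokenizer("Don't tokenize me, bro!")
    print([t.text for t in doc])
    # expected roughly: ['Do', "n't", 'tokenize', 'me', ',', 'bro', '!']

    # The private fields map onto public constructor arguments:
    # _rules -> rules, _prefix_search -> prefix_search,
    # _suffix_search -> suffix_search, _infix_finditer -> infix_finditer,
    # _token_match -> token_match, _url_match -> url_match
    custom = Tokenizer(
        nlp.vocab,
        rules={":)": [{"ORTH": ":)"}]},                  # special cases
        prefix_search=re.compile(r"^[(\[{]").search,     # opening punctuation
        suffix_search=re.compile(r"[)\]}!?.,]$").search, # closing punctuation
        infix_finditer=re.compile(r"[-~]").finditer,     # split on hyphens/tildes
    )
    print([t.text for t in custom("(hello-world!) :)")])
    # expected roughly: ['(', 'hello', '-', 'world', '!', ')', ':)']

The special-case machinery (_specials, _special_matcher) is what lets ":)" survive intact here even though ")" is otherwise a suffix.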