Mirror of https://github.com/explosion/spaCy.git (synced 2024-12-28 02:46:35 +03:00), commit 9bc4cc1352:

* Test on #2396: bug in Doc.get_lca_matrix()
* Reimplementation of Doc.get_lca_matrix() (closes #2396)
* Reimplement Span.get_lca_matrix(), and call it from Doc.get_lca_matrix()
* Tests for Span.get_lca_matrix() as well as Doc.get_lca_matrix()
* Implement _get_lca_matrix as a helper function in doc.pyx; call it from Doc.get_lca_matrix and Span.get_lca_matrix
* Use a memory view instead of np.ndarray in _get_lca_matrix (faster)
* Fix bug when calling Span.get_lca_matrix; return the LCA matrix as np.array instead of a memoryview
* Cleaner conditional, add comment
72 lines
1.6 KiB
Cython
from cymem.cymem cimport Pool
|
|
cimport numpy as np
|
|
from preshed.counter cimport PreshCounter
|
|
|
|
from ..vocab cimport Vocab
|
|
from ..structs cimport TokenC, LexemeC
|
|
from ..typedefs cimport attr_t
|
|
from ..attrs cimport attr_id_t
|
|
|
|
|
|
# Shared C-level declarations for spaCy's Doc implementation; the
# definitions live in doc.pyx.

# Look up a single attribute value (e.g. a lexeme or tag attribute) on a
# token struct. Declared nogil so it can be used in GIL-free loops.
cdef attr_t get_token_attr(const TokenC* token, attr_id_t feat_name) nogil


ctypedef const LexemeC* const_Lexeme_ptr
ctypedef const TokenC* const_TokenC_ptr

# Fused type: callers (e.g. Doc.push_back below) can pass either a
# vocabulary lexeme or an already-built token.
ctypedef fused LexemeOrToken:
    const_Lexeme_ptr
    const_TokenC_ptr


# Recompute per-token child bookkeeping after head offsets change.
# Returns -1 with a Python exception set on error.
cdef int set_children_from_heads(TokenC* tokens, int length) except -1

# Map a character offset to a token index. `except -2` reserves -2 as the
# error sentinel — presumably leaving -1 free as a plain "not found"
# result; confirm against the implementations in doc.pyx.
cdef int token_by_start(const TokenC* tokens, int length, int start_char) except -2

cdef int token_by_end(const TokenC* tokens, int length, int end_char) except -2

# NOTE(review): a second, byte-identical declaration of
# set_children_from_heads appeared here; the redundant duplicate has been
# removed.

# Lowest-common-ancestor matrix helper shared by Doc.get_lca_matrix and
# Span.get_lca_matrix (see commit message); returns a typed int
# memoryview for speed.
cdef int [:,:] _get_lca_matrix(Doc, int start, int end)
|
|
|
|
cdef class Doc:
    # C-level attribute and method declarations for the Doc extension
    # type; implementations live in doc.pyx. Declarations only — no
    # docstring, since executable statements are not allowed in a .pxd
    # class body.

    # Memory pool owning this document's C-level allocations.
    cdef readonly Pool mem
    # Vocabulary object shared with the rest of the pipeline.
    cdef readonly Vocab vocab

    # Backing fields for the document vector and its norm — presumably
    # lazily computed by Python-level properties; confirm in doc.pyx.
    cdef public object _vector
    cdef public object _vector_norm

    # Auxiliary per-document tensor of annotations (opaque object here).
    cdef public object tensor
    # Document-level categories (opaque object here).
    cdef public object cats
    # Free-form dict-like storage for user extensions.
    cdef public object user_data

    # Pointer to the contiguous array of token structs.
    cdef TokenC* c

    # Annotation-state flags.
    cdef public bint is_tagged
    cdef public bint is_parsed

    # Document-level sentiment score.
    cdef public float sentiment

    # User-registered override hooks at doc, token, and span level.
    cdef public dict user_hooks
    cdef public dict user_token_hooks
    cdef public dict user_span_hooks

    # Cache of Python-level Token objects — TODO confirm usage in doc.pyx.
    cdef public list _py_tokens

    # Current number of tokens, and the capacity presumably allocated in
    # `mem` — confirm against push_back's implementation.
    cdef int length
    cdef int max_length

    # Optional language-specific iterator used for noun-chunk extraction.
    cdef public object noun_chunks_iterator

    # Declaring __weakref__ makes Doc instances weak-referenceable.
    cdef object __weakref__

    # Append one lexeme-or-token (see the LexemeOrToken fused type) with a
    # trailing-space flag; returns -1 with an exception set on error.
    cdef int push_back(self, LexemeOrToken lex_or_tok, bint has_space) except -1

    # Export the requested attributes as a NumPy array.
    cpdef np.ndarray to_array(self, object features)

    # Copy parse information from an already-parsed token array; nogil.
    cdef void set_parse(self, const TokenC* parsed) nogil