mirror of
https://github.com/explosion/spaCy.git
synced 2024-11-11 12:18:04 +03:00
f277bfdf0f
* Draft out initial Spans data structure * Initial span group commit * Basic span group support on Doc * Basic test for span group * Compile span_group.pyx * Draft addition of SpanGroup to DocBin * Add deserialization for SpanGroup * Add tests for serializing SpanGroup * Fix serialization of SpanGroup * Add EdgeC and GraphC structs * Add draft Graph data structure * Compile graph * More work on Graph * Update GraphC * Upd graph * Fix walk functions * Let Graph take nodes and edges on construction * Fix walking and getting * Add graph tests * Fix import * Add module with the SpanGroups dict thingy * Update test * Rename 'span_groups' attribute * Try to fix c++11 compilation * Fix test * Update DocBin * Try to fix compilation * Try to fix graph * Improve SpanGroup docstrings * Add doc.spans to documentation * Fix serialization * Tidy up and add docs * Update docs [ci skip] * Add SpanGroup.has_overlap * WIP updated Graph API * Start testing new Graph API * Update Graph tests * Update Graph * Add docstring Co-authored-by: Ines Montani <ines@ines.io>
72 lines
1.6 KiB
Cython
from cymem.cymem cimport Pool
|
|
cimport numpy as np
|
|
|
|
from ..vocab cimport Vocab
|
|
from ..structs cimport TokenC, LexemeC, SpanC
|
|
from ..typedefs cimport attr_t
|
|
from ..attrs cimport attr_id_t
|
|
|
|
|
|
# Retrieve a single attribute value from a token struct.
# Declared nogil so it can be called without the GIL (e.g. from parallel code).
cdef attr_t get_token_attr(const TokenC* token, attr_id_t feat_name) nogil

# Matcher-specific variant of get_token_attr; presumably applies extra
# normalization for pattern matching — confirm against the .pyx implementation.
cdef attr_t get_token_attr_for_matcher(const TokenC* token, attr_id_t feat_name) nogil
|
# Named pointer typedefs: Cython fused types require simple type names,
# so the const-pointer types are aliased before being fused below.
ctypedef const LexemeC* const_Lexeme_ptr
ctypedef const TokenC* const_TokenC_ptr


# Fused type: a function taking LexemeOrToken is specialized at compile time
# for both a const LexemeC* and a const TokenC* argument.
ctypedef fused LexemeOrToken:
    const_Lexeme_ptr
    const_TokenC_ptr
|
# Recompute child/edge bookkeeping on the token array after head indices
# change; operates on tokens in [start, end). `except -1` makes -1 signal
# a raised Python exception to Cython callers.
cdef int set_children_from_heads(TokenC* tokens, int start, int end) except -1


# Internal helper for set_children_from_heads (leading underscore = private);
# loop_count presumably guards against non-terminating head cycles — verify
# in the .pyx implementation.
cdef int _set_lr_kids_and_edges(TokenC* tokens, int start, int end, int loop_count) except -1


# Map a character offset to a token index. `except -2` reserves -2 for the
# exception sentinel, leaving -1 available as a valid "not found" return.
cdef int token_by_start(const TokenC* tokens, int length, int start_char) except -2


# As token_by_start, but matches on the token's end character offset.
cdef int token_by_end(const TokenC* tokens, int length, int end_char) except -2


# Build a matrix over tokens in [start, end) as a 2D int memoryview;
# by its name, entries are lowest-common-ancestor indices — TODO confirm.
cdef int [:,:] _get_lca_matrix(Doc, int start, int end)
|
cdef class Doc:
    """C-level attribute and method declarations for the Doc extension type.

    The implementation lives in the matching .pyx file; this .pxd only fixes
    the struct layout and C-visible API. `readonly` attributes are visible but
    not assignable from Python; `public` attributes are fully accessible.
    """
    # Memory pool owning this doc's C allocations.
    cdef readonly Pool mem
    # Shared vocabulary/string store for this doc.
    cdef readonly Vocab vocab

    # Cached document vector and its norm (underscore: internal caches).
    cdef public object _vector
    cdef public object _vector_norm

    # Model-assigned tensor and text-classification scores.
    cdef public object tensor
    cdef public object cats
    # Free-form user storage dict.
    cdef public object user_data
    # Named span groups container (read-only binding from Python).
    cdef readonly object spans

    # Pointer to the token data; presumably a contiguous TokenC array
    # grown via push_back — confirm in the .pyx implementation.
    cdef TokenC* c

    cdef public float sentiment

    # User-registered hook overrides for Doc/Token/Span behavior.
    cdef public dict user_hooks
    cdef public dict user_token_hooks
    cdef public dict user_span_hooks

    # True when whitespace information was not known at construction time.
    cdef public bint has_unknown_spaces

    # Internal cache of Python-level Token objects.
    cdef public list _py_tokens

    # Token count and allocated capacity of `c` (names suggest this;
    # verify against the .pyx implementation).
    cdef int length
    cdef int max_length

    # Optional callable producing noun-chunk spans for this language.
    cdef public object noun_chunks_iterator

    # Declared so Doc instances support weak references.
    cdef object __weakref__

    # Append a lexeme or token (fused type) to the doc; -1 signals an
    # exception to Cython callers.
    cdef int push_back(self, LexemeOrToken lex_or_tok, bint has_space) except -1

    # Export the requested attributes as a NumPy array; cpdef makes it
    # callable from both C and Python.
    cpdef np.ndarray to_array(self, object features)