# spacy/tokens.pxd — C-level declarations shared with tokens.pyx (Cython)
from libc.stdint cimport uint32_t
from numpy cimport ndarray
cimport numpy

from cymem.cymem cimport Pool
from thinc.typedefs cimport atom_t

from .typedefs cimport flags_t, attr_id_t, attr_t
from .parts_of_speech cimport univ_pos_t
from .structs cimport Morphology, TokenC, LexemeC
from .vocab cimport Vocab
from .strings cimport StringStore
# Named pointer aliases: Cython fused types require plain type names as
# members, so the pointer spellings are ctypedef'd first.
ctypedef const LexemeC* const_Lexeme_ptr
ctypedef TokenC* TokenC_ptr


# A fused type so one implementation (e.g. Tokens.push_back) can accept
# either a read-only vocabulary lexeme or a mutable in-context token.
ctypedef fused LexemeOrToken:
    const_Lexeme_ptr
    TokenC_ptr
# Forward declarations; the implementations live in the matching tokens.pyx.

# Read attribute `feat_name` from a context-independent lexeme entry.
cdef attr_t get_lex_attr(const LexemeC* lex, attr_id_t feat_name) nogil


# Read attribute `feat_name` from a token in context.
# NOTE(review): the parameter is named `lex` but receives a TokenC*;
# the name is kept here so the declaration matches the .pyx definition.
cdef attr_t get_token_attr(const TokenC* lex, attr_id_t feat_name) nogil
cdef inline bint check_flag(const LexemeC* lexeme, attr_id_t flag_id) nogil:
    # Test one boolean lexeme flag by bit index into the `flags` bitfield.
    # Inline in the .pxd so callers get it without a function call.
    return lexeme.flags & (1 << flag_id)
cdef class Tokens:
    """C-level layout of a token sequence (a processed text).

    Owns a contiguous ``TokenC`` array plus bookkeeping for lazily-created
    Python-level ``Token`` wrappers and tagging/parsing state flags.
    """
    cdef Pool mem                # memory pool; presumably owns `data` — verify in tokens.pyx
    cdef Vocab vocab             # shared vocabulary

    cdef TokenC* data            # contiguous token array; `length` entries in use

    cdef list _py_tokens         # cached Token wrappers, one slot per token (see Token.cinit)
    cdef unicode _string         # original input text
    cdef tuple _tag_strings      # presumably tag-id -> string lookup — confirm against .pyx

    cdef public bint is_tagged   # set once POS tags have been assigned
    cdef public bint is_parsed   # set once the dependency parse has been assigned

    cdef int length              # number of tokens currently stored
    cdef int max_length          # allocated capacity of `data`

    # Append a lexeme or token at position i; returns -1 on error (Cython
    # `except -1` convention propagates the Python exception).
    cdef int push_back(self, int i, LexemeOrToken lex_or_tok) except -1

    # Export the requested attribute columns as a 2-D memoryview.
    cpdef long[:,:] to_array(self, object features)

    # Copy parse results from `parsed` into this sequence.
    cdef int set_parse(self, const TokenC* parsed) except -1
cdef class Token:
    """Python-facing view onto one ``TokenC`` entry in a parent Tokens array."""
    cdef Vocab vocab
    cdef unicode _string         # the full document text (shared with the parent)

    cdef const TokenC* c         # underlying C token struct
    cdef readonly int i          # this token's offset within the parent array
    cdef int array_len           # parent array length, used for bounds checks
    cdef bint _owns_c_data       # presumably set by take_ownership_of_c_data — confirm in .pyx

    cdef Tokens _seq             # reference keeps the parent sequence (and its pool) alive

    @staticmethod
    cdef inline Token cinit(Vocab vocab, unicode string,
                            const TokenC* token, int offset, int array_len,
                            Tokens parent_seq):
        """Fast C-level constructor.

        Returns the memoized wrapper from the parent's ``_py_tokens`` cache
        when one exists; otherwise builds a new Token, caches it, and
        returns it.  Raises IndexError for an out-of-range offset.
        """
        if offset < 0 or offset >= array_len:
            msg = "Attempt to access token at %d, max length %d"
            raise IndexError(msg % (offset, array_len))
        # Reuse the cached wrapper so token identity is stable per position.
        if parent_seq._py_tokens[offset] is not None:
            return parent_seq._py_tokens[offset]
        # __new__ bypasses any Python-level __init__ work.
        cdef Token self = Token.__new__(Token, vocab, string)
        self.c = token
        self.i = offset
        self.array_len = array_len
        self._seq = parent_seq
        self._seq._py_tokens[offset] = self
        return self

    # Copy the C data this token points at so it outlives the parent array;
    # returns -1 on error (propagates the Python exception).
    cdef int take_ownership_of_c_data(self) except -1

    # Test one boolean lexeme flag for this token (cf. module-level check_flag).
    cpdef bint check_flag(self, attr_id_t flag_id) except -1