Fallback if vectors.attr doesn't exist

Adriane Boyd 2023-05-22 08:49:44 +02:00
parent 79bd698b73
commit fa7971ce97
4 changed files with 7 additions and 9 deletions
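In short: every call site that read vocab.vectors.attr directly now uses getattr(vocab.vectors, "attr", ORTH), so vectors tables created before the attr field existed fall back to keying entries on ORTH. Below is a minimal sketch of the pattern outside spaCy; the vectors classes and the attribute ID value are hypothetical stand-ins, and only the getattr() call mirrors the actual change.

# Sketch of the fallback introduced in this commit. OldVectors/NewVectors are
# hypothetical stand-ins for a spaCy vectors table; only the getattr() call
# mirrors the real change.
ORTH = 65  # placeholder attribute ID standing in for spacy.attrs.ORTH

class OldVectors:
    """Mimics a vectors table saved before an attr field existed."""
    pass

class NewVectors:
    """Mimics a vectors table that records which token attribute keys it."""
    def __init__(self, attr: int):
        self.attr = attr

def key_attr_for(vectors) -> int:
    # Before: key_attr = ORTH, then overwrite it if hasattr(vectors, "attr").
    # After:  a single getattr() with ORTH as the fallback value.
    return getattr(vectors, "attr", ORTH)

print(key_attr_for(OldVectors()))    # 65 -> falls back to ORTH
print(key_attr_for(NewVectors(82)))  # 82 -> uses the stored attr

The same fallback is applied to the similarity methods of Doc, Span and Token in the diffs below.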

spacy/ml/staticvectors.py

@@ -45,9 +45,7 @@ def forward(
     if not token_count:
         return _handle_empty(model.ops, model.get_dim("nO"))
     vocab: Vocab = docs[0].vocab
-    key_attr: int = ORTH
-    if hasattr(vocab.vectors, "attr"):
-        key_attr = vocab.vectors.attr
+    key_attr: int = getattr(vocab.vectors, "attr", ORTH)
     keys = model.ops.flatten([cast(Ints1d, doc.to_array(key_attr)) for doc in docs])
     W = cast(Floats2d, model.ops.as_contig(model.get_param("W")))
     if vocab.vectors.mode == Mode.default:

spacy/tokens/doc.pyx

@@ -25,7 +25,7 @@ from ..lexeme cimport Lexeme, EMPTY_LEXEME
 from ..typedefs cimport attr_t, flags_t
 from ..attrs cimport attr_id_t
 from ..attrs cimport LENGTH, POS, LEMMA, TAG, MORPH, DEP, HEAD, SPACY, ENT_IOB
-from ..attrs cimport ENT_TYPE, ENT_ID, ENT_KB_ID, SENT_START, IDX, NORM
+from ..attrs cimport ENT_TYPE, ENT_ID, ENT_KB_ID, SENT_START, IDX, NORM, ORTH
 from ..attrs import intify_attr, IDS
 from ..compat import copy_reg, pickle
@@ -591,7 +591,7 @@ cdef class Doc:
         """
         if "similarity" in self.user_hooks:
             return self.user_hooks["similarity"](self, other)
-        attr = self.doc.vocab.vectors.attr
+        attr = getattr(self.vocab.vectors, "attr", ORTH)
         cdef Token this_token
         cdef Token other_token
         cdef Lexeme other_lex

spacy/tokens/span.pyx

@@ -10,7 +10,7 @@ from .doc cimport token_by_start, token_by_end, get_token_attr, _get_lca_matrix
 from .token cimport Token
 from ..structs cimport TokenC, LexemeC
 from ..typedefs cimport flags_t, attr_t, hash_t
-from ..attrs cimport attr_id_t
+from ..attrs cimport attr_id_t, ORTH
 from ..parts_of_speech cimport univ_pos_t
 from ..attrs cimport *
 from ..lexeme cimport Lexeme
@@ -341,7 +341,7 @@ cdef class Span:
         """
         if "similarity" in self.doc.user_span_hooks:
             return self.doc.user_span_hooks["similarity"](self, other)
-        attr = self.doc.vocab.vectors.attr
+        attr = getattr(self.doc.vocab.vectors, "attr", ORTH)
         cdef Token this_token
         cdef Token other_token
         cdef Lexeme other_lex

spacy/tokens/token.pyx

@@ -13,7 +13,7 @@ from ..lexeme cimport Lexeme
 from ..attrs cimport IS_ALPHA, IS_ASCII, IS_DIGIT, IS_LOWER, IS_PUNCT, IS_SPACE
 from ..attrs cimport IS_BRACKET, IS_QUOTE, IS_LEFT_PUNCT, IS_RIGHT_PUNCT
 from ..attrs cimport IS_TITLE, IS_UPPER, IS_CURRENCY, IS_STOP
-from ..attrs cimport LIKE_URL, LIKE_NUM, LIKE_EMAIL
+from ..attrs cimport LIKE_URL, LIKE_NUM, LIKE_EMAIL, ORTH
 from ..symbols cimport conj
 from .morphanalysis cimport MorphAnalysis
 from .doc cimport set_children_from_heads
@@ -197,7 +197,7 @@ cdef class Token:
         """
         if "similarity" in self.doc.user_token_hooks:
             return self.doc.user_token_hooks["similarity"](self, other)
-        attr = self.doc.vocab.vectors.attr
+        attr = getattr(self.doc.vocab.vectors, "attr", ORTH)
         cdef Token this_token = self
         cdef Token other_token
         cdef Lexeme other_lex