mirror of
https://github.com/explosion/spaCy.git
synced 2025-07-18 12:12:20 +03:00
Hack for transformer listener size
This commit is contained in:
parent
7811a1194b
commit
6974f55daa
|
@@ -32,7 +32,12 @@ def build_wl_coref_model(
     # span predictor embeddings
     sp_embedding_size: int = 64,
 ):
-    dim = tok2vec.get_dim("nO")
+    # TODO fix this
+    try:
+        dim = tok2vec.get_dim("nO")
+    except ValueError:
+        # happens with transformer listener
+        dim = 768
+
     with Model.define_operators({">>": chain}):
         # TODO chain tok2vec with these models
|
Loading…
Reference in New Issue
Block a user