Bugfix linking vectors (#5196)

* restore call to _load_vectors

* bump to thinc 8.0.0a3

* bump to 3.0.0.dev4
Sofie Van Landeghem 2020-03-25 10:20:11 +01:00 committed by GitHub
parent fcac1ace78
commit 218e1706ac
6 changed files with 8 additions and 6 deletions

pyproject.toml

@@ -6,7 +6,7 @@ requires = [
     "cymem>=2.0.2,<2.1.0",
     "preshed>=3.0.2,<3.1.0",
     "murmurhash>=0.28.0,<1.1.0",
-    "thinc==8.0.0a1",
+    "thinc==8.0.0a3",
     "blis>=0.4.0,<0.5.0"
 ]
 build-backend = "setuptools.build_meta"

requirements.txt

@@ -1,7 +1,7 @@
 # Our libraries
 cymem>=2.0.2,<2.1.0
 preshed>=3.0.2,<3.1.0
-thinc==8.0.0a1
+thinc==8.0.0a3
 blis>=0.4.0,<0.5.0
 ml_datasets>=0.1.1
 murmurhash>=0.28.0,<1.1.0

setup.cfg

@@ -36,13 +36,13 @@ setup_requires =
     cymem>=2.0.2,<2.1.0
     preshed>=3.0.2,<3.1.0
     murmurhash>=0.28.0,<1.1.0
-    thinc==8.0.0a1
+    thinc==8.0.0a3
 install_requires =
     # Our libraries
     murmurhash>=0.28.0,<1.1.0
     cymem>=2.0.2,<2.1.0
     preshed>=3.0.2,<3.1.0
-    thinc==8.0.0a1
+    thinc==8.0.0a3
     blis>=0.4.0,<0.5.0
     wasabi>=0.4.0,<1.1.0
     srsly>=2.0.0,<3.0.0

spacy/about.py

@@ -1,6 +1,6 @@
 # fmt: off
 __title__ = "spacy"
-__version__ = "3.0.0.dev3"
+__version__ = "3.0.0.dev4"
 __release__ = True
 __download_url__ = "https://github.com/explosion/spacy-models/releases/download"
 __compatibility__ = "https://raw.githubusercontent.com/explosion/spacy-models/master/compatibility.json"

spacy/cli/train.py

@@ -143,6 +143,7 @@ def train(
     )
     if vectors:
         msg.text(f"Loading vectors from model '{vectors}'")
+        _load_vectors(nlp, vectors)
     nlp.disable_pipes([p for p in nlp.pipe_names if p not in pipeline])
     for pipe in pipeline:
@@ -210,6 +211,7 @@ def train(
         if vectors:
             msg.text(f"Loading vectors from model '{vectors}'")
+            _load_vectors(nlp, vectors)
         for pipe in pipeline:
            # first, create the model.

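The two hunks above are the actual fix: the `_load_vectors(nlp, vectors)` call is restored in both places where the "Loading vectors from model ..." message is printed, so the message is once again followed by a real load. The helper's body is not part of this diff; the following is a rough, hypothetical sketch of what such a helper does, assuming `spacy.util.load_model` accepts a `vocab` override (the function name matches the diff, but the body is illustrative, not spaCy's verbatim implementation):

```python
from spacy import util

def _load_vectors(nlp, vectors):
    # Hypothetical sketch: load the vectors package with the training
    # pipeline's own Vocab, so its word vectors land on nlp.vocab and
    # are visible to every component that trains against them.
    util.load_model(vectors, vocab=nlp.vocab)
```

Without the restored call, training would announce the vectors but never actually link them, which is the bug the PR title refers to.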
spacy/syntax/_parser_model.pyx

@@ -250,7 +250,7 @@ class ParserModel(Model):
         nI = smaller.get_dim("nI")
         with use_ops('numpy'):
             larger = Linear(nO=new_nO, nI=nI)
-        larger._init = smaller._init
+        larger.init = smaller.init
         # it could be that the model is not initialized yet, then skip this bit
         if nI:
             larger_W = larger.ops.alloc2f(new_nO, nI)
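The `_init` to `init` change tracks thinc 8.0.0a3, where the model's init callback became the public `init` attribute. For context, here is a minimal sketch of the resize pattern this hunk sits in, written against the thinc 8 API; the standalone helper `resize_linear` is an illustration, not spaCy's exact `ParserModel.resize_output`:

```python
from thinc.api import Linear, use_ops

def resize_linear(smaller, new_nO):
    """Grow a Linear layer's output dimension, keeping trained weights."""
    nI = smaller.get_dim("nI") if smaller.has_dim("nI") else None
    with use_ops("numpy"):
        larger = Linear(nO=new_nO, nI=nI)
    # thinc 8.0.0a3 exposes the init callback as `init` (it was the
    # private `_init` in earlier alphas), so the larger layer will be
    # initialized the same way as the layer it replaces.
    larger.init = smaller.init
    # The model may not be initialized yet; only copy weights across
    # when the input dimension is known.
    if nI is not None:
        larger_W = larger.ops.alloc2f(new_nO, nI)
        larger_b = larger.ops.alloc1f(new_nO)
        old_nO = smaller.get_dim("nO")
        larger_W[:old_nO] = smaller.get_param("W")
        larger_b[:old_nO] = smaller.get_param("b")
        larger.set_param("W", larger_W)
        larger.set_param("b", larger_b)
    return larger
```

The rows of the old weight matrix are copied into the top of the new one, so existing output classes keep their parameters and only the added rows start from the allocated zeros.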