Fix train-from-config
parent 24efd54a42
commit fda7355508
@@ -226,7 +226,7 @@ def train_from_config(
 def create_train_batches(nlp, corpus, cfg):
     is_first = True
     while True:
-        train_examples = corpus.train_dataset(
+        train_examples = list(corpus.train_dataset(
             nlp,
             noise_level=0.0,
             orth_variant_level=cfg["orth_variant_level"],
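The first hunk wraps the corpus.train_dataset(...) call in list(). The call yields examples lazily, so without the wrapper the stream would be exhausted after a single pass, and later iterations of the surrounding while True loop would see no data; materializing the examples as a list lets them be reused (and reshuffled) on every pass. Below is a minimal, self-contained sketch of that generator behaviour; fake_train_dataset() is a made-up stand-in for corpus.train_dataset(...), not spaCy's Corpus API.

# Minimal sketch of the generator-exhaustion problem the list() wrapper avoids.
def fake_train_dataset():
    # Stand-in for corpus.train_dataset(...), which yields examples lazily.
    for i in range(3):
        yield f"example-{i}"

gen = fake_train_dataset()
print(list(gen))   # ['example-0', 'example-1', 'example-2']
print(list(gen))   # [] -- the generator is already exhausted

examples = list(fake_train_dataset())
print(examples)    # first pass
print(examples)    # later passes can reuse (and shuffle) the same list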
@@ -324,7 +324,6 @@ def train_while_improving(
         for subbatch in subdivide_batch(batch, accumulate_gradient):
             nlp.update(subbatch, drop=dropout, losses=losses, sgd=False)
         for name, proc in nlp.pipeline:
-        for name, proc in nlp.pipeline:
             if hasattr(proc, "model"):
                 proc.model.finish_update(optimizer)
         optimizer.step_schedules()
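The second hunk removes a duplicated "for name, proc in nlp.pipeline:" line, so after the gradients have been accumulated over the sub-batches the pipeline is walked exactly once per batch when applying them. A self-contained toy version of the fixed loop shape follows; DummyOptimizer, DummyModel and the pipeline contents are invented for this illustration, and only the control flow mirrors the diff.

# Toy version of the fixed update step (invented stand-in classes).
class DummyOptimizer:
    def __init__(self):
        self.finish_calls = 0
        self.schedule_steps = 0

    def step_schedules(self):
        self.schedule_steps += 1

class DummyModel:
    def finish_update(self, optimizer):
        optimizer.finish_calls += 1  # stand-in for applying accumulated gradients

class DummyComponent:
    def __init__(self):
        self.model = DummyModel()

pipeline = [("tagger", DummyComponent()), ("parser", DummyComponent())]
optimizer = DummyOptimizer()

# One pass over the pipeline per batch, as in the fixed code.
for name, proc in pipeline:
    if hasattr(proc, "model"):
        proc.model.finish_update(optimizer)
optimizer.step_schedules()

assert optimizer.finish_calls == len(pipeline)  # each model updated once
assert optimizer.schedule_steps == 1            # schedules advance once per batch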