diff --git a/spacy/tests/training/test_pretraining.py b/spacy/tests/training/test_pretraining.py
index 7e15810d5..1046db32f 100644
--- a/spacy/tests/training/test_pretraining.py
+++ b/spacy/tests/training/test_pretraining.py
@@ -181,7 +181,7 @@ def test_pretraining_tok2vec_characters(objective):
         pretrain(filled, tmp_dir)
         assert Path(tmp_dir / "model0.bin").exists()
         assert Path(tmp_dir / "model4.bin").exists()
-        assert Path(tmp_dir / "model_last.bin").exists()
+        assert Path(tmp_dir / "model-last.bin").exists()
         assert not Path(tmp_dir / "model5.bin").exists()

@@ -238,7 +238,7 @@ def test_pretraining_tagger_tok2vec(config):
         pretrain(filled, tmp_dir)
         assert Path(tmp_dir / "model0.bin").exists()
         assert Path(tmp_dir / "model4.bin").exists()
-        assert Path(tmp_dir / "model_last.bin").exists()
+        assert Path(tmp_dir / "model-last.bin").exists()
         assert not Path(tmp_dir / "model5.bin").exists()

diff --git a/spacy/training/pretrain.py b/spacy/training/pretrain.py
index 339a10dbc..b7d30e59e 100644
--- a/spacy/training/pretrain.py
+++ b/spacy/training/pretrain.py
@@ -80,8 +80,8 @@ def pretrain(

     # TODO: I think we probably want this to look more like the
     # 'create_train_batches' function?
-    for epoch in range(epoch_resume, P["max_epochs"]):
-        try:
+    try:
+        for epoch in range(epoch_resume, P["max_epochs"]):
             for batch_id, batch in enumerate(batcher(corpus(nlp))):
                 docs = ensure_docs(batch)
                 loss = make_update(model, docs, optimizer, objective)
@@ -97,8 +97,8 @@ def pretrain(
             else:
                 _save_model(epoch)
             tracker.epoch_loss = 0.0
-        finally:
-            _save_model(epoch, is_last=True)
+    finally:
+        _save_model(P["max_epochs"], is_last=True)


 def ensure_docs(examples_or_docs: Iterable[Union[Doc, Example]]) -> List[Doc]: