diff --git a/spacy/language.py b/spacy/language.py
index 97a317101..6c0a8394d 100644
--- a/spacy/language.py
+++ b/spacy/language.py
@@ -8,7 +8,7 @@ from contextlib import contextmanager
 from copy import deepcopy
 from pathlib import Path
 import warnings
-from thinc.api import Model, get_current_ops, Config, require_gpu, Optimizer
+from thinc.api import Model, get_current_ops, Config, Optimizer
 import srsly
 import multiprocessing as mp
 from itertools import chain, cycle
@@ -1153,10 +1153,9 @@ class Language:
         get_examples: Optional[Callable[[], Iterable[Example]]] = None,
         *,
         sgd: Optional[Optimizer] = None,
-        device: int = -1,
     ) -> Optimizer:
         warnings.warn(Warnings.W089, DeprecationWarning)
-        return self.initialize(get_examples, sgd=sgd, device=device)
+        return self.initialize(get_examples, sgd=sgd)
 
     def initialize(
         self,
@@ -1169,7 +1168,7 @@ class Language:
 
         get_examples (Callable[[], Iterable[Example]]): Optional function that
             returns gold-standard Example objects.
-        sgd (Optional[Optimizer]): An optimizer to use for updates. If not 
+        sgd (Optional[Optimizer]): An optimizer to use for updates. If not
             provided, will be created using the .create_optimizer() method.
         RETURNS (thinc.api.Optimizer): The optimizer.
@@ -1220,7 +1219,6 @@ class Language:
                 proc.initialize, p_settings, section="components", name=name
             )
             proc.initialize(
-                get_examples, pipeline=self.pipeline
                 get_examples,
                 pipeline=self.pipeline,
                 **p_settings,
@@ -1315,7 +1313,7 @@ class Language:
         n_words = sum(len(doc) for doc in docs)
         results["speed"] = n_words / (end_time - start_time)
         return results
-    
+
     def create_optimizer(self):
         """Create an optimizer, usually using the [training.optimizer] config."""
         subconfig = {"optimizer": self.config["training"]["optimizer"]}
diff --git a/spacy/pipeline/dep_parser.pyx b/spacy/pipeline/dep_parser.pyx
index 95effac59..eedb4cba9 100644
--- a/spacy/pipeline/dep_parser.pyx
+++ b/spacy/pipeline/dep_parser.pyx
@@ -132,7 +132,7 @@ cdef class DependencyParser(Parser):
             labeller.model.set_dim("nO", len(self.labels))
             if labeller.model.has_ref("output_layer"):
                 labeller.model.get_ref("output_layer").set_dim("nO", len(self.labels))
-            labeller.initialize(get_examples, pipeline=pipeline, sgd=sgd)
+            labeller.initialize(get_examples, pipeline=pipeline)
 
     @property
     def labels(self):
diff --git a/spacy/pipeline/sentencizer.pyx b/spacy/pipeline/sentencizer.pyx
index 0f49033ff..3cd480d20 100644
--- a/spacy/pipeline/sentencizer.pyx
+++ b/spacy/pipeline/sentencizer.pyx
@@ -58,7 +58,7 @@ class Sentencizer(Pipe):
         else:
             self.punct_chars = set(self.default_punct_chars)
 
-    def initialize(self, get_examples, pipeline=None, sgd=None):
+    def initialize(self, get_examples, pipeline=None):
         pass
 
     def __call__(self, doc):
diff --git a/spacy/schemas.py b/spacy/schemas.py
index 594fc92ad..e183e0a75 100644
--- a/spacy/schemas.py
+++ b/spacy/schemas.py
@@ -107,7 +107,7 @@ def validate_init_settings(
     *,
     section: Optional[str] = None,
     name: str = "",
-    exclude: Iterable[str] = ("get_examples", "nlp", "pipeline", "sgd"),
+    exclude: Iterable[str] = ("get_examples", "nlp", "pipeline"),
 ) -> Dict[str, Any]:
     """Validate initialization settings against the expected arguments in
     the method signature. Will parse values if possible (e.g. int to string)
diff --git a/spacy/training/initialize.py b/spacy/training/initialize.py
index b42732d48..498fd890c 100644
--- a/spacy/training/initialize.py
+++ b/spacy/training/initialize.py
@@ -55,7 +55,7 @@ def init_nlp(config: Config, *, use_gpu: int = -1, silent: bool = True) -> Langu
             msg.info(f"Resuming training for: {resume_components}")
             nlp.resume_training(sgd=optimizer)
     with nlp.select_pipes(disable=[*frozen_components, *resume_components]):
-        nlp.initialize(lambda: train_corpus(nlp), sgd=optimizer, settings=I)
+        nlp.initialize(lambda: train_corpus(nlp), settings=I)
     msg.good("Initialized pipeline components")
     # Verify the config after calling 'initialize' to ensure labels
     # are properly initialized
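
A minimal sketch (not part of the patch) of what the change means for a custom pipeline component, keyed to the signatures shown in this diff: `initialize()` no longer takes `sgd`, per-component settings arrive via `**p_settings`, and the optimizer is created and returned at the `Language` level. The names `my_component`, `MyComponent`, and `create_my_component` are hypothetical.

```python
import spacy
from spacy.language import Language


class MyComponent:
    def __call__(self, doc):
        # Pass the Doc through unchanged; a real component would set annotations here.
        return doc

    def initialize(self, get_examples, pipeline=None, **kwargs):
        # No `sgd` parameter anymore; any per-component settings from the
        # [initialize.components] config section come in via **kwargs.
        pass


@Language.factory("my_component")
def create_my_component(nlp, name):
    return MyComponent()


nlp = spacy.blank("en")
nlp.add_pipe("my_component")
optimizer = nlp.initialize()  # the optimizer is still created and returned here
```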