Mirror of https://github.com/explosion/spaCy.git, synced 2025-01-26 09:14:32 +03:00
remove link_components flag again (#6883)

commit f638306598
parent e97d3f3c69
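This change removes the `link_components` keyword argument from `Language.initialize`, so listener components are always linked during initialization; `Tok2Vec.add_listener` now skips listeners that are already registered, and `init_nlp` calls `nlp.initialize()` without the flag.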
```diff
@@ -1190,7 +1190,6 @@ class Language:
         get_examples: Optional[Callable[[], Iterable[Example]]] = None,
         *,
         sgd: Optional[Optimizer] = None,
-        link_components: bool = True,
     ) -> Optimizer:
         """Initialize the pipe for training, using data examples if available.

```
```diff
@@ -1198,8 +1197,6 @@ class Language:
             returns gold-standard Example objects.
         sgd (Optional[Optimizer]): An optimizer to use for updates. If not
             provided, will be created using the .create_optimizer() method.
-        link_components (bool): Link listener components automatically or not
-            (default True)
         RETURNS (thinc.api.Optimizer): The optimizer.

         DOCS: https://spacy.io/api/language#initialize
```
```diff
@@ -1247,8 +1244,7 @@ class Language:
                     proc.initialize, p_settings, section="components", name=name
                 )
                 proc.initialize(get_examples, nlp=self, **p_settings)
-        if link_components:
-            self._link_components()
+        self._link_components()
         self._optimizer = sgd
         if sgd is not None:
             self._optimizer = sgd
```
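With the flag gone, `Language.initialize` always links listener components before setting up the optimizer. A minimal sketch of the resulting call, assuming spaCy v3.x with this change applied (the blank pipeline and lone `tok2vec` component are illustrative):

```python
import spacy

# Minimal sketch, assuming spaCy v3.x with this change applied:
# initialize() no longer accepts a link_components argument, and listener
# components are linked unconditionally inside the call.
nlp = spacy.blank("en")
nlp.add_pipe("tok2vec")
optimizer = nlp.initialize()  # links listeners, returns a thinc Optimizer
```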
```diff
@@ -80,7 +80,8 @@ class Tok2Vec(TrainablePipe):
     def add_listener(self, listener: "Tok2VecListener", component_name: str) -> None:
         """Add a listener for a downstream component. Usually internals."""
         self.listener_map.setdefault(component_name, [])
-        self.listener_map[component_name].append(listener)
+        if listener not in self.listener_map[component_name]:
+            self.listener_map[component_name].append(listener)

     def remove_listener(self, listener: "Tok2VecListener", component_name: str) -> bool:
         """Remove a listener for a downstream component. Usually internals."""
```
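The membership check matters because listeners can now be linked more than once, for example by `init_nlp` and then again inside `initialize`. A standalone sketch of the guard, using illustrative names rather than the actual spaCy internals:

```python
# Standalone sketch of the deduplication guard added to Tok2Vec.add_listener;
# listener_map, add_listener and tagger_listener are illustrative names only.
listener_map: dict = {}

def add_listener(listener, component_name: str) -> None:
    # Register the listener only once per downstream component.
    listener_map.setdefault(component_name, [])
    if listener not in listener_map[component_name]:
        listener_map[component_name].append(listener)

tagger_listener = object()  # stands in for a Tok2VecListener instance
add_listener(tagger_listener, "tagger")
add_listener(tagger_listener, "tagger")  # second call is a no-op
assert listener_map["tagger"] == [tagger_listener]
```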
```diff
@@ -67,7 +67,7 @@ def init_nlp(config: Config, *, use_gpu: int = -1) -> "Language":
     # Make sure that listeners are defined before initializing further
     nlp._link_components()
     with nlp.select_pipes(disable=[*frozen_components, *resume_components]):
-        nlp.initialize(lambda: train_corpus(nlp), sgd=optimizer, link_components=False)
+        nlp.initialize(lambda: train_corpus(nlp), sgd=optimizer)
     logger.info(f"Initialized pipeline components: {nlp.pipe_names}")
     # Detect components with listeners that are not frozen consistently
     for name, proc in nlp.pipeline:
```
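In `init_nlp`, listeners are still linked once up front via `nlp._link_components()` so that listener configuration can be resolved before initialization, and `nlp.initialize()` now links them again unconditionally. Presumably this double pass is what the new guard in `Tok2Vec.add_listener` makes harmless, which is why the `link_components=False` escape hatch could be dropped from the call.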