Docs: clarify abstract spacy.load examples (#12889)
This commit is contained in: parent 64b8ee2dbe, commit 76a9f9c6c6
@@ -68,7 +68,7 @@ weights, and returns it.
 cls = spacy.util.get_lang_class(lang)  # 1. Get Language class, e.g. English
 nlp = cls()                            # 2. Initialize it
 for name in pipeline:
-    nlp.add_pipe(name)                 # 3. Add the component to the pipeline
+    nlp.add_pipe(name, config={...})   # 3. Add the component to the pipeline
 nlp.from_disk(data_path)               # 4. Load in the binary data
 ```
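The `config={...}` placeholder added in this hunk is shorthand for the per-component settings that `Language.add_pipe` accepts; it is not literal runnable Python. A minimal concrete sketch of that keyword argument, with a component and settings chosen here purely for illustration (they are not taken from the docs page being changed):

```python
import spacy

# Blank English pipeline; no trained weights are needed for this sketch.
nlp = spacy.blank("en")

# add_pipe looks up a registered factory by name; config= overrides the
# factory's default settings for this component instance.
nlp.add_pipe("sentencizer", config={"punct_chars": [".", "!", "?"]})

doc = nlp("Hello world. How are you?")
print([sent.text for sent in doc.sents])
```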
@@ -244,7 +244,7 @@ tagging pipeline. This is also why the pipeline state is always held by the
 together and returns an instance of `Language` with a pipeline set and access to
 the binary data:

-```python {title="spacy.load under the hood"}
+```python {title="spacy.load under the hood (abstract example)"}
 lang = "en"
 pipeline = ["tok2vec", "tagger", "parser", "ner", "attribute_ruler", "lemmatizer"]
 data_path = "path/to/en_core_web_sm/en_core_web_sm-3.0.0"
@@ -252,7 +252,7 @@ data_path = "path/to/en_core_web_sm/en_core_web_sm-3.0.0"
 cls = spacy.util.get_lang_class(lang)  # 1. Get Language class, e.g. English
 nlp = cls()                            # 2. Initialize it
 for name in pipeline:
-    nlp.add_pipe(name)                 # 3. Add the component to the pipeline
+    nlp.add_pipe(name, config={...})   # 3. Add the component to the pipeline
 nlp.from_disk(data_path)               # 4. Load in the binary data
 ```
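The "(abstract example)" qualifier added to the title reflects that `spacy.load` does not hard-code a pipeline list in practice; in spaCy v3 the component order and per-component settings come from the trained package's `config.cfg`. A hedged sketch of how the abstract steps map onto the public API, where the short component list and the commented-out package name and path are placeholders rather than an exact trace of `spacy.load`:

```python
import spacy
from spacy.util import get_lang_class

# The usual one-liner, shown for contrast: it resolves the Language class,
# builds the pipeline from the installed package's config.cfg, and loads the
# binary weights. Requires that trained package to be installed.
# nlp = spacy.load("en_core_web_sm")

# The abstract steps from the docs snippet, spelled out:
cls = get_lang_class("en")          # 1. Get Language class, e.g. English
nlp = cls()                         # 2. Initialize it
for name in ["tok2vec", "tagger"]:  # 3. Add components to the pipeline
    nlp.add_pipe(name)              #    (a real load pulls each component's config from config.cfg)
# nlp.from_disk("path/to/en_core_web_sm/en_core_web_sm-3.0.0")  # 4. Load in the binary data (placeholder path)
```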