Update transformer model details [ci skip]
This commit is contained in:
parent dd30d3ec99
commit 8f76d6c9ef

@@ -48,4 +48,6 @@ redirects = [
     {from = "/api/sentencesegmenter", to="/api/sentencizer"},
     {from = "/universe", to = "/universe/project/:id", query = {id = ":id"}, force = true},
     {from = "/universe", to = "/universe/category/:category", query = {category = ":category"}, force = true},
+    # Renamed universe projects
+    {from = "/universe/project/spacy-pytorch-transformers", to = "/universe/project/spacy-transformers", force = true}
 ]

@@ -8,10 +8,10 @@
             "en_core_web_md",
             "en_core_web_lg",
             "en_vectors_web_lg",
-            "en_pytt_bertbaseuncased_lg",
-            "en_pytt_robertabase_lg",
-            "en_pytt_distilbertbaseuncased_lg",
-            "en_pytt_xlnetbasecased_lg"
+            "en_trf_bertbaseuncased_lg",
+            "en_trf_robertabase_lg",
+            "en_trf_distilbertbaseuncased_lg",
+            "en_trf_xlnetbasecased_lg"
         ],
         "example": "This is a sentence.",
         "has_examples": true

@@ -19,7 +19,7 @@
     {
         "code": "de",
         "name": "German",
-        "models": ["de_core_news_sm", "de_core_news_md", "de_pytt_bertbasecased_lg"],
+        "models": ["de_core_news_sm", "de_core_news_md", "de_trf_bertbasecased_lg"],
         "example": "Dies ist ein Satz.",
         "has_examples": true
     },
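
For code that used these packages, the change above amounts to a rename: the name passed to spacy.load moves from en_pytt_* to en_trf_*, and (per the universe example further down) the custom extension attributes move from the pytt_ prefix to the trf_ prefix. A minimal before/after sketch, assuming the new en_trf_bertbaseuncased_lg package is installed:

    import spacy

    # Old package name (spacy-pytorch-transformers era):
    # nlp = spacy.load("en_pytt_bertbaseuncased_lg")

    # New package name (spacy-transformers):
    nlp = spacy.load("en_trf_bertbaseuncased_lg")

    doc = nlp("This is a sentence.")
    # Extension attributes use the trf_ prefix instead of pytt_,
    # e.g. doc._.pytt_last_hidden_state becomes doc._.trf_last_hidden_state.
    print(doc._.trf_last_hidden_state.shape)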

@@ -1675,21 +1675,21 @@
         }
     },
     {
-        "id": "spacy-pytorch-transformers",
-        "title": "spacy-pytorch-transformers",
+        "id": "spacy-transformers",
+        "title": "spacy-transformers",
         "slogan": "spaCy pipelines for pretrained BERT, XLNet and GPT-2",
-        "description": "This package provides spaCy model pipelines that wrap [Hugging Face's `pytorch-transformers`](https://github.com/huggingface/pytorch-transformers) package, so you can use them in spaCy. The result is convenient access to state-of-the-art transformer architectures, such as BERT, GPT-2, XLNet, etc.",
-        "github": "explosion/spacy-pytorch-transformers",
-        "url": "https://explosion.ai/blog/spacy-pytorch-transformers",
-        "pip": "spacy-pytorch-transformers",
+        "description": "This package provides spaCy model pipelines that wrap [Hugging Face's `transformers`](https://github.com/huggingface/transformers) package, so you can use them in spaCy. The result is convenient access to state-of-the-art transformer architectures, such as BERT, GPT-2, XLNet, etc.",
+        "github": "explosion/spacy-transformers",
+        "url": "https://explosion.ai/blog/spacy-transformers",
+        "pip": "spacy-transformers",
         "category": ["pipeline", "models", "research"],
         "code_example": [
             "import spacy",
             "",
-            "nlp = spacy.load(\"en_pytt_bertbaseuncased_lg\")",
+            "nlp = spacy.load(\"en_trf_bertbaseuncased_lg\")",
             "doc = nlp(\"Apple shares rose on the news. Apple pie is delicious.\")",
             "print(doc[0].similarity(doc[7]))",
-            "print(doc._.pytt_last_hidden_state.shape)"
+            "print(doc._.trf_last_hidden_state.shape)"
         ],
         "author": "Explosion",
         "author_links": {
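
Decoded from the JSON string array above, the updated code_example is the following runnable snippet (assuming the en_trf_bertbaseuncased_lg package is installed):

    import spacy

    nlp = spacy.load("en_trf_bertbaseuncased_lg")
    doc = nlp("Apple shares rose on the news. Apple pie is delicious.")
    # Compare the two occurrences of "Apple" (token 0 and token 7).
    print(doc[0].similarity(doc[7]))
    # Shape of the transformer's last hidden state, now under the trf_ prefix.
    print(doc._.trf_last_hidden_state.shape)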

@@ -23,6 +23,7 @@ const MODEL_META = {
   dep: 'Vocabulary, syntax',
   ent: 'Named entities',
   pytt: 'PyTorch Transformers',
+  trf: 'Transformers',
   vectors: 'Word vectors',
   web: 'written text (blogs, news, comments)',
   news: 'written text (news, media)',
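
MODEL_META holds the human-readable labels the models page shows for abbreviations like trf; a rough Python rendering of the table above (illustrative only, the actual lookup lives in the site's JavaScript):

    # Abbreviation labels copied from the hunk above, as a Python dict.
    MODEL_META = {
        "dep": "Vocabulary, syntax",
        "ent": "Named entities",
        "pytt": "PyTorch Transformers",
        "trf": "Transformers",
        "vectors": "Word vectors",
        "web": "written text (blogs, news, comments)",
        "news": "written text (news, media)",
    }

    print(MODEL_META["trf"])   # "Transformers" -- label added for the renamed trf models
    print(MODEL_META["pytt"])  # "PyTorch Transformers" -- kept for the older pytt names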