//- 💫 DOCS > USAGE > WORD VECTORS & SIMILARITIES
include ../_includes/_mixins
+section("basics")
    +aside("Training word vectors")
        |  Dense, real-valued vectors representing distributional similarity
        |  information are now a cornerstone of practical NLP. The most common
        |  way to train these vectors is the
        |  #[+a("https://en.wikipedia.org/wiki/Word2vec") word2vec] family of
        |  algorithms. If you need to train a word2vec model, we recommend the
        |  implementation in the Python library
        |  #[+a("https://radimrehurek.com/gensim/") Gensim].
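    p
        |  For reference, here's a minimal sketch of training vectors with
        |  Gensim on its built-in toy corpus. It assumes Gensim 4.x, where
        |  the dimensionality is set via #[code vector_size]; older versions
        |  call this parameter #[code size].

    +code.
        from gensim.models import Word2Vec
        from gensim.test.utils import common_texts   # tiny toy corpus shipped with Gensim

        # Train a small model; real training data should be much larger
        model = Word2Vec(sentences=common_texts, vector_size=100, window=5,
                         min_count=1, workers=4)

        vector = model.wv[u'computer']                       # the learned 100-d vector
        most_similar = model.wv.most_similar(u'computer', topn=5)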
include _spacy-101/_similarity
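    p
        |  For quick reference, here's a small, self-contained example of
        |  comparing documents and spans. It assumes a model that ships with
        |  word vectors, such as #[code en_core_web_md], is installed.

    +code.
        import spacy

        nlp = spacy.load('en_core_web_md')   # a model that includes word vectors

        doc1 = nlp(u"I like salty fries and hamburgers.")
        doc2 = nlp(u"Fast food tastes very good.")

        # Doc, Span and Token objects all expose a .similarity() method
        print(doc1.similarity(doc2))
        print(doc1[2:4].similarity(doc2[0:2]))   # "salty fries" vs. "Fast food"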
include _spacy-101/_word-vectors
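    p
        |  As a quick sketch, the snippet below checks which tokens have a
        |  vector and which are out-of-vocabulary. Again, it assumes a model
        |  with word vectors, such as #[code en_core_web_md], is installed.

    +code.
        import spacy

        nlp = spacy.load('en_core_web_md')   # a model that includes word vectors
        tokens = nlp(u"dog cat banana afskfsd")

        for token in tokens:
            # has_vector: entry in the vector table, vector_norm: L2 norm,
            # is_oov: out of vocabulary
            print(token.text, token.has_vector, token.vector_norm, token.is_oov)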
+section("custom")
+h(2, "custom") Customising word vectors
include _vectors-similarity/_custom
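    p
        |  As a minimal sketch of the API, a single custom vector can be
        |  added to the vocabulary via #[code Vocab.set_vector], available
        |  from spaCy v2.0. The word and vector below are made up purely for
        |  illustration.

    +code.
        import numpy
        import spacy

        nlp = spacy.blank('en')   # start from a blank English pipeline

        # A made-up 300-dimensional vector, just for illustration
        vector = numpy.random.uniform(-1, 1, (300,)).astype('float32')
        nlp.vocab.set_vector(u'avocado', vector)

        assert nlp.vocab.has_vector(u'avocado')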
+section("gpu")
+h(2, "gpu") Storing vectors on a GPU
include _vectors-similarity/_gpu
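    p
        |  As a rough sketch, assuming #[code cupy] is installed and a CUDA
        |  device is available, the vector table can be swapped for a GPU
        |  array like this:

    +code.
        import cupy
        import spacy

        nlp = spacy.load('en_core_web_md')   # a model that includes word vectors

        # Swap the underlying numpy array for a CuPy array on GPU device 0
        with cupy.cuda.Device(0):
            nlp.vocab.vectors.data = cupy.asarray(nlp.vocab.vectors.data)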