Mirror of https://github.com/explosion/spaCy.git
Switch to ReLu layers in Tok2Vec
parent bbace204be
commit ac2de6dced

spacy/_ml.py (13 changed lines)
@@ -220,12 +220,12 @@ def Tok2Vec(width, embed_size, preprocess=None):
         tok2vec = (
             with_flatten(
                 asarray(Model.ops, dtype='uint64')
-                >> uniqued(embed, column=5)
+                >> embed
                 >> LN(Maxout(width, width*4, pieces=3))
-                >> Residual(ExtractWindow(nW=1) >> LN(Maxout(width, width*3)))
-                >> Residual(ExtractWindow(nW=1) >> Maxout(width, width*3))
-                >> Residual(ExtractWindow(nW=1) >> Maxout(width, width*3))
-                >> Residual(ExtractWindow(nW=1) >> Maxout(width, width*3)),
+                >> Residual(ExtractWindow(nW=1) >> ReLu(width, width*3))
+                >> Residual(ExtractWindow(nW=1) >> ReLu(width, width*3))
+                >> Residual(ExtractWindow(nW=1) >> ReLu(width, width*3))
+                >> Residual(ExtractWindow(nW=1) >> ReLu(width, width*3)),
                 pad=4)
         )
         if preprocess not in (False, None):
@@ -321,7 +321,8 @@ def zero_init(model):
 
 
 def doc2feats(cols=None):
-    cols = [ID, NORM, PREFIX, SUFFIX, SHAPE, ORTH]
+    if cols is None:
+        cols = [ID, NORM, PREFIX, SUFFIX, SHAPE, ORTH]
     def forward(docs, drop=0.):
         feats = []
         for doc in docs:
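For context on what the new composition computes, below is a minimal NumPy sketch of one residual block after this change: ExtractWindow(nW=1) concatenates each token vector with its immediate neighbours, and ReLu(width, width*3) projects that width*3 window back down to width through a ReLU-activated affine layer, with the block's input added back by Residual. This is an illustration only; the helper names extract_window and residual_relu_block are hypothetical and are not spaCy or Thinc APIs.

import numpy as np

def extract_window(X, nW=1):
    # Concatenate each row with its nW neighbours on either side,
    # zero-padded at the edges, so the feature width becomes
    # (2 * nW + 1) * width -- the role played by ExtractWindow(nW=1).
    n, width = X.shape
    padded = np.vstack([np.zeros((nW, width)), X, np.zeros((nW, width))])
    return np.hstack([padded[i:i + n] for i in range(2 * nW + 1)])

def residual_relu_block(X, W, b):
    # One block as composed after this commit: window features,
    # a ReLU projection back to `width`, plus the skip connection.
    window = extract_window(X, nW=1)          # shape (n, width * 3)
    hidden = np.maximum(0.0, window @ W + b)  # ReLu(width, width * 3)
    return X + hidden                         # Residual(...)

# Toy usage: width=4, five tokens, random weights.
rng = np.random.default_rng(0)
width, n = 4, 5
X = rng.normal(size=(n, width))
W = rng.normal(size=(width * 3, width)) * 0.1
b = np.zeros(width)
print(residual_relu_block(X, W, b).shape)  # (5, 4)

The second hunk is independent of the activation change: doc2feats previously overwrote a caller-supplied cols argument, and it now falls back to the default feature columns only when cols is None.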