Merge pull request #625 from pspiegelhalter/master

PR to fix Issue #623
This commit is contained in:
Matthew Honnibal 2016-11-12 23:04:36 +11:00 committed by GitHub
commit 2ee66117ba
2 changed files with 7 additions and 3 deletions

View File

@@ -7,6 +7,7 @@ from pathlib import Path
from spacy_hook import get_embeddings, get_word_ids
from spacy_hook import create_similarity_pipeline
from keras_decomposable_attention import build_model
def train(model_dir, train_loc, dev_loc, shape, settings):
print("Loading spaCy")

View File

@@ -101,10 +101,11 @@ class _Attention(object):
self.model = TimeDistributed(self.model)
def __call__(self, sent1, sent2):
def _outer((A, B)):
att_ji = T.batched_dot(B, A.dimshuffle((0, 2, 1)))
def _outer(AB):
att_ji = T.batched_dot(AB[1], AB[0].dimshuffle((0, 2, 1)))
return att_ji.dimshuffle((0, 2, 1))
return merge(
[self.model(sent1), self.model(sent2)],
mode=_outer,
@@ -117,7 +118,9 @@ class _SoftAlignment(object):
self.nr_hidden = nr_hidden
def __call__(self, sentence, attention, transpose=False):
def _normalize_attention((att, mat)):
def _normalize_attention(attmat):
att = attmat[0]
mat = attmat[1]
if transpose:
att = att.dimshuffle((0, 2, 1))
# 3d softmax