From a592075720f027fdefd6858a1da04db5353b7de7 Mon Sep 17 00:00:00 2001
From: Chen Buskilla
Date: Sun, 13 Nov 2016 14:53:55 +0200
Subject: [PATCH] fix parikh entailment test methods bug with settings

---
 .../keras_parikh_entailment/keras_decomposable_attention.py | 4 ++--
 1 file changed, 2 insertions(+), 2 deletions(-)

diff --git a/examples/keras_parikh_entailment/keras_decomposable_attention.py b/examples/keras_parikh_entailment/keras_decomposable_attention.py
index 80eac2da7..06a352f91 100644
--- a/examples/keras_parikh_entailment/keras_decomposable_attention.py
+++ b/examples/keras_parikh_entailment/keras_decomposable_attention.py
@@ -199,7 +199,7 @@ class _GlobalSumPooling1D(Layer):
 def test_build_model():
     vectors = numpy.ndarray((100, 8), dtype='float32')
     shape = (10, 16, 3)
-    settings = {'lr': 0.001, 'dropout': 0.2}
+    settings = {'lr': 0.001, 'dropout': 0.2, 'gru_encode':True}
     model = build_model(vectors, shape, settings)


@@ -220,7 +220,7 @@ def test_fit_model():
     vectors = numpy.ndarray((100, 8), dtype='float32')
     shape = (10, 16, 3)
-    settings = {'lr': 0.001, 'dropout': 0.2}
+    settings = {'lr': 0.001, 'dropout': 0.2, 'gru_encode':True}
     model = build_model(vectors, shape, settings)
     train_X = _generate_X(20, shape[0], vectors.shape[1])
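
A minimal sketch of why the tests need the extra key, assuming (from this fix) that build_model() in keras_decomposable_attention.py looks up settings['gru_encode'] directly; the function names below are hypothetical stand-ins, not the example's real API:

    # Hypothetical illustration: the old settings dict is missing a key that
    # build_model() is assumed to read, so the lookup fails before the model
    # is constructed; the patched dict supplies it explicitly.
    old_settings = {'lr': 0.001, 'dropout': 0.2}
    new_settings = {'lr': 0.001, 'dropout': 0.2, 'gru_encode': True}

    def choose_encoder(settings):
        # Stand-in for the branch inside build_model() that picks the encoder.
        return 'gru' if settings['gru_encode'] else 'sum'

    choose_encoder(new_settings)   # -> 'gru'
    # choose_encoder(old_settings) would raise KeyError: 'gru_encode'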