spaCy/spacy/tests/parser/test_neural_parser.py
Matthew Honnibal 8661218fe8
Refactor parser (#2308)
* Work on refactoring greedy parser

* Compile updated parser

* Fix refactored parser

* Update test

* Fix refactored parser

* Fix refactored parser

* Readd beam search after refactor

* Fix beam search after refactor

* Fix parser

* Fix beam parsing

* Support oracle segmentation in ud-train CLI command

* Avoid relying on final gold check in beam search

* Add a keyword argument sink to GoldParse

* Bug fixes to beam search after refactor

* Avoid importing fused token symbol in ud-run-test, until that's added

* Avoid importing fused token symbol in ud-run-test, until that's added

* Don't modify Token in global scope

* Fix error in beam gradient calculation

* Default to beam_update_prob 1

* Set a more aggressive threshold on the max-violation update

* Disable some tests to figure out why CI fails

* Disable some tests to figure out why CI fails

* Add some diagnostics to travis.yml to try to figure out why build fails

* Tell Thinc to link against system blas on Travis

* Point thinc to libblas on Travis

* Try running sudo=true for travis

* Unhack travis.sh

* Restore beam_density argument for parser beam

* Require thinc 6.11.1.dev16

* Revert hacks to tests

* Revert hacks to travis.yml

* Update thinc requirement

* Fix parser model loading

* Fix size limits in training data

* Add missing name attribute for parser

* Fix appveyor for Windows
2018-05-15 22:17:29 +02:00


# coding: utf8
from __future__ import unicode_literals

from thinc.neural import Model
import pytest
import numpy

from ..._ml import chain, Tok2Vec, doc2feats
from ...vocab import Vocab
from ...pipeline import Tensorizer
from ...syntax.arc_eager import ArcEager
from ...syntax.nn_parser import Parser
from ...tokens.doc import Doc
from ...gold import GoldParse
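

# Fixtures assemble the smallest setup that can exercise the neural parser:
# a fresh Vocab, an ArcEager transition system with a single left ('L') and
# right ('R') label, a small Tok2Vec layer, and a three-word Doc paired with
# a GoldParse rooted at the middle token.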
@pytest.fixture
def vocab():
    return Vocab()


@pytest.fixture
def arc_eager(vocab):
    actions = ArcEager.get_actions(left_labels=['L'], right_labels=['R'])
    return ArcEager(vocab.strings, actions)


@pytest.fixture
def tok2vec():
    return Tok2Vec(8, 100)


@pytest.fixture
def parser(vocab, arc_eager):
    return Parser(vocab, moves=arc_eager, model=None)


@pytest.fixture
def model(arc_eager, tok2vec):
    # Parser.Model(...) returns a tuple; the model itself is the first element.
    return Parser.Model(arc_eager.n_moves, token_vector_width=tok2vec.nO)[0]


@pytest.fixture
def doc(vocab):
    return Doc(vocab, words=['a', 'b', 'c'])


@pytest.fixture
def gold(doc):
    return GoldParse(doc, heads=[1, 1, 1], deps=['L', 'ROOT', 'R'])
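

# The tests below cover model construction, greedy prediction, and a single
# training update driven by a toy sgd callable.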
def test_can_init_nn_parser(parser):
    assert parser.model is None


def test_build_model(parser):
    parser.model = Parser.Model(parser.moves.n_moves, hist_size=0)[0]
    assert parser.model is not None


def test_predict_doc(parser, tok2vec, model, doc):
    # Give the doc a tensor of token vectors before parsing.
    doc.tensor = tok2vec([doc])[0]
    parser.model = model
    parser(doc)


def test_update_doc(parser, model, doc, gold):
    parser.model = model

    def optimize(weights, gradient, key=None):
        # Toy sgd callable: a plain gradient-descent step.
        weights -= 0.001 * gradient

    parser.update([doc], [gold], sgd=optimize)
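

# The beam-search variants below are marked xfail: per the commit log above,
# beam search was re-added after the parser refactor and was still being
# stabilized when this test was written.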
@pytest.mark.xfail
def test_predict_doc_beam(parser, model, doc):
    parser.model = model
    parser(doc, beam_width=32, beam_density=0.001)


@pytest.mark.xfail
def test_update_doc_beam(parser, model, doc, gold):
    parser.model = model

    def optimize(weights, gradient, key=None):
        # Same toy sgd callable as in test_update_doc.
        weights -= 0.001 * gradient

    parser.update_beam([doc], [gold], sgd=optimize)