spaCy/spacy/tests/test_misc.py
Matthew Honnibal 8661218fe8
Refactor parser (#2308)
* Work on refactoring greedy parser

* Compile updated parser

* Fix refactored parser

* Update test

* Fix refactored parser

* Fix refactored parser

* Readd beam search after refactor

* Fix beam search after refactor

* Fix parser

* Fix beam parsing

* Support oracle segmentation in ud-train CLI command

* Avoid relying on final gold check in beam search

* Add a keyword argument sink to GoldParse

* Bug fixes to beam search after refactor

* Avoid importing fused token symbol in ud-run-test, until that's added

* Avoid importing fused token symbol in ud-run-test, until that's added

* Don't modify Token in global scope

* Fix error in beam gradient calculation

* Default to beam_update_prob 1

* Set a more aggressive threshold on the max violn update

* Disable some tests to figure out why CI fails

* Disable some tests to figure out why CI fails

* Add some diagnostics to travis.yml to try to figure out why build fails

* Tell Thinc to link against system blas on Travis

* Point thinc to libblas on Travis

* Try running sudo=true for travis

* Unhack travis.sh

* Restore beam_density argument for parser beam

* Require thinc 6.11.1.dev16

* Revert hacks to tests

* Revert hacks to travis.yml

* Update thinc requirement

* Fix parser model loading

* Fix size limits in training data

* Add missing name attribute for parser

* Fix appveyor for Windows
2018-05-15 22:17:29 +02:00

94 lines
3.3 KiB
Python

# coding: utf-8
from __future__ import unicode_literals
from ..util import ensure_path
from .. import util
from ..displacy import parse_deps, parse_ents
from ..tokens import Span
from .util import get_doc
from .._ml import PrecomputableAffine
from pathlib import Path
import pytest
from thinc.neural._classes.maxout import Maxout
from thinc.neural._classes.softmax import Softmax
from thinc.api import chain
@pytest.mark.parametrize('text', ['hello/world', 'hello world'])
def test_util_ensure_path_succeeds(text):
    """ensure_path should wrap a plain string in a pathlib.Path object."""
    result = util.ensure_path(text)
    assert isinstance(result, Path)
@pytest.mark.parametrize('package', ['numpy'])
def test_util_is_package(package):
    """A package installed via pip should be recognised by util.is_package."""
    installed = util.is_package(package)
    assert installed
@pytest.mark.parametrize('package', ['thinc'])
def test_util_get_package_path(package):
    """Looking up a package by name should yield a pathlib.Path."""
    package_path = util.get_package_path(package)
    assert isinstance(package_path, Path)
@pytest.mark.xfail
def test_displacy_parse_ents(en_vocab):
    """Named entities on a Doc should be converted into displaCy's dict format."""
    words = ["But", "Google", "is", "starting", "from", "behind"]
    doc = get_doc(en_vocab, words=words)
    doc.ents = [Span(doc, 1, 2, label=doc.vocab.strings[u'ORG'])]
    converted = parse_ents(doc)
    assert isinstance(converted, dict)
    # Offsets are character positions into the rendered text.
    assert converted['text'] == 'But Google is starting from behind '
    assert converted['ents'] == [{'start': 4, 'end': 10, 'label': 'ORG'}]
@pytest.mark.xfail
def test_displacy_parse_deps(en_vocab):
    """Tags and dependency arcs on a Doc should convert into displaCy's format."""
    words = ["This", "is", "a", "sentence"]
    heads = [1, 0, 1, -2]
    pos = ['DET', 'VERB', 'DET', 'NOUN']
    tags = ['DT', 'VBZ', 'DT', 'NN']
    labels = ['nsubj', 'ROOT', 'det', 'attr']
    doc = get_doc(en_vocab, words=words, heads=heads, pos=pos, tags=tags,
                  deps=labels)
    converted = parse_deps(doc)
    assert isinstance(converted, dict)
    # displaCy reports the coarse POS under the 'tag' key.
    expected_words = [{'text': 'This', 'tag': 'DET'},
                      {'text': 'is', 'tag': 'VERB'},
                      {'text': 'a', 'tag': 'DET'},
                      {'text': 'sentence', 'tag': 'NOUN'}]
    assert converted['words'] == expected_words
    expected_arcs = [{'start': 0, 'end': 1, 'label': 'nsubj', 'dir': 'left'},
                     {'start': 2, 'end': 3, 'label': 'det', 'dir': 'left'},
                     {'start': 1, 'end': 3, 'label': 'attr', 'dir': 'right'}]
    assert converted['arcs'] == expected_arcs
@pytest.mark.xfail
def test_PrecomputableAffine(nO=4, nI=5, nF=3, nP=2):
    """Check PrecomputableAffine's shapes and padding-gradient bookkeeping."""
    model = PrecomputableAffine(nO=nO, nI=nI, nF=nF, nP=nP)
    assert model.W.shape == (nF, nO, nP, nI)
    inputs = model.ops.allocate((10, nI))
    outputs, backprop = model.begin_update(inputs)
    # The forward pass emits one extra row relative to its input.
    assert outputs.shape == (inputs.shape[0] + 1, nF, nO, nP)
    assert model.d_pad.shape == (1, nF, nO, nP)
    dY = model.ops.allocate((15, nO, nP))
    ids = model.ops.allocate((15, nF))
    # One id of -1 in row 1 should route exactly one gradient into d_pad.
    ids[1, 2] = -1
    dY[1] = 1
    assert model.d_pad[0, 2, 0, 0] == 0.
    model._backprop_padding(dY, ids)
    assert model.d_pad[0, 2, 0, 0] == 1.
    # Reset everything, then mark all three feature slots of row 1 as padding:
    # the same gradient row should now be accumulated three times.
    model.d_pad.fill(0.)
    ids.fill(0.)
    dY.fill(0.)
    for feature in (2, 1, 0):
        ids[1, feature] = -1
    dY[1] = 1
    assert model.d_pad[0, 2, 0, 0] == 0.
    model._backprop_padding(dY, ids)
    assert model.d_pad[0, 2, 0, 0] == 3.