Mirror of https://github.com/explosion/spaCy.git, synced 2025-08-10 15:14:56 +03:00

Update tests

This commit is contained in:
parent 06a5be9dfd
commit e6bacc26cb
@@ -2,27 +2,40 @@
align in tokenization.'''
from __future__ import unicode_literals
import pytest
from collections import Counter
from ...tokens import Doc
from ...gold import GoldParse
from ...gold import _flatten_fused_heads
from ...vocab import Vocab


@pytest.mark.parametrize('fused,flat', [
    ([[(0, 1), 1], 1], [1, 2, 2]),
    ([1, 1, [1, 3], 1], [1, 1, 1, 4, 1])
])
def test_flatten_fused_heads(fused, flat):
    assert _flatten_fused_heads(fused) == flat


def test_over_segmented():
    doc = Doc(Vocab(), words=['a', 'b', 'c'])
    gold = GoldParse(doc, words=['ab', 'c'], heads=[1, 1])
    assert gold.heads == [1, 2, 2]
    assert gold._alignment._y2t == [(0, 0), (0, 1), 1]
    assert gold.labels == ['subtok', None, None]


def test_under_segmented():
    doc = Doc(Vocab(), words=['ab', 'c'])
    gold = GoldParse(doc, words=['a', 'b', 'c'], heads=[2, 2, 2])
    assert gold.heads == [[1, 1], 1]
    assert gold.labels == [[None, None], None]


def test_over_segmented_heads():
    doc = Doc(Vocab(), words=['a', 'b', 'c', 'd', 'e'])
    gold = GoldParse(doc, words=['a', 'bc', 'd', 'e'], heads=[2, 2, 2, 2])
    assert gold._alignment._y2t == [0, (1, 0), (1, 1), 2, 3]
    assert gold._alignment._t2y == [0, [1, 2], 3, 4]
    assert gold.labels == [None, 'subtok', None, None, None]
    assert gold.heads == [3, 2, 3, 3, 3]


def test_under_segmented_attach_inside_fused():
    '''Test arcs pointing into the fused token, e.g. "its good".'''
    doc = Doc(Vocab(), words=['ab', 'c'])
    gold = GoldParse(doc, words=['a', 'b', 'c'], heads=[1, 1, 1])
    assert gold.heads == [[(0, 1), (0, 1)], (0, 1)]
    assert gold.labels == [[None, None], None]

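The `_flatten_fused_heads` helper imported above is not part of this diff. A minimal sketch that reproduces the two parametrized cases, assuming a nested list marks a fused token (one head per subtoken) and a `(token, subtoken)` tuple points at a subtoken of a fused token, could look like the following; this is a hypothetical stand-in, not spaCy's implementation.

def flatten_fused_heads(heads):
    # Hypothetical sketch. A plain entry is a token index; a nested list is a
    # fused token with one head per subtoken; a (token, subtoken) tuple points
    # at a subtoken inside a fused token.
    offsets = []
    flat_pos = 0
    for value in heads:
        offsets.append(flat_pos)
        flat_pos += len(value) if isinstance(value, list) else 1

    def resolve(head):
        if isinstance(head, tuple):
            token, subtok = head
            return offsets[token] + subtok
        return offsets[head]

    flat = []
    for value in heads:
        if isinstance(value, list):
            flat.extend(resolve(head) for head in value)
        else:
            flat.append(resolve(value))
    return flat

assert flatten_fused_heads([[(0, 1), 1], 1]) == [1, 2, 2]
assert flatten_fused_heads([1, 1, [1, 3], 1]) == [1, 1, 1, 4, 1]
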
@@ -173,6 +173,21 @@ def test_oracle_at_sentence_break(arc_eager, vocab):
    assert c3['B-ROOT'] == 0.0
    assert c3['D'] == 9000.0


def test_split_oracle(arc_eager, vocab):
    gold_words = ['a', 'b', 'c']
    doc = Doc(vocab, words=['ab', 'c'])
    heads = [2, 2, 2]
    deps = ['dep', 'dep', 'ROOT']
    actions = ['P-1', 'S', 'L-dep', 'S', 'B-ROOT']
    gold = GoldParse(doc, words=gold_words, heads=heads, deps=deps)
    assert gold.heads == [[1, 1], 1]
    assert gold.labels == [['dep', 'dep'], 'ROOT']
    state = StateClass(doc)
    M = arc_eager
    M.preprocess_gold(gold)


annot_tuples = [
    (0, 'When', 'WRB', 11, 'advmod', 'O'),
    (1, 'Walter', 'NNP', 2, 'compound', 'B-PERSON'),

@@ -1,6 +1,8 @@
from __future__ import unicode_literals
import pytest
-from .._align import align, multi_align
+from numpy.testing import assert_array_equal
+from .._align import levenshtein_align, multi_align, Alignment
+from .._align import _get_regions, _get_many2one


@pytest.mark.parametrize('string1,string2,cost', [

@@ -11,7 +13,7 @@ from .._align import align, multi_align
    ('t', 'catsie', 5),
])
def test_align_costs(string1, string2, cost):
-    output_cost, i2j, j2i, matrix = align(string1, string2)
+    output_cost, i2j, j2i, matrix = levenshtein_align(string1, string2)
    assert output_cost == cost

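`levenshtein_align` itself is not shown in this diff. As a rough sketch of the return convention exercised here, the following hypothetical routine computes a unit-cost Levenshtein alignment and returns `(cost, i2j, j2i, matrix)`, where `i2j[i]` is the index in the second sequence that element `i` is matched to (identical elements only) and `-1` otherwise; spaCy's actual `_align` implementation, its cost weights, and the shape of the matrix it returns may differ.

import numpy as np

def toy_levenshtein_align(seq1, seq2):
    # Hypothetical sketch with unit insert/delete/substitute costs.
    n, m = len(seq1), len(seq2)
    matrix = np.zeros((n + 1, m + 1), dtype='int32')
    matrix[:, 0] = np.arange(n + 1)
    matrix[0, :] = np.arange(m + 1)
    for i in range(1, n + 1):
        for j in range(1, m + 1):
            sub = 0 if seq1[i - 1] == seq2[j - 1] else 1
            matrix[i, j] = min(matrix[i - 1, j] + 1,        # delete seq1[i-1]
                               matrix[i, j - 1] + 1,        # insert seq2[j-1]
                               matrix[i - 1, j - 1] + sub)  # match or substitute
    # Backtrace, recording indices only for exact matches.
    i2j = np.full(n, -1, dtype='int32')
    j2i = np.full(m, -1, dtype='int32')
    i, j = n, m
    while i > 0 and j > 0:
        sub = 0 if seq1[i - 1] == seq2[j - 1] else 1
        if matrix[i, j] == matrix[i - 1, j - 1] + sub:
            if sub == 0:
                i2j[i - 1] = j - 1
                j2i[j - 1] = i - 1
            i -= 1
            j -= 1
        elif matrix[i, j] == matrix[i - 1, j] + 1:
            i -= 1
        else:
            j -= 1
    return int(matrix[n, m]), i2j, j2i, matrix

# Matches the character-level case from the parametrized test above.
cost, i2j, j2i, matrix = toy_levenshtein_align('t', 'catsie')
assert cost == 5
assert list(i2j) == [2]
assert list(j2i) == [-1, -1, 0, -1, -1, -1]
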
@@ -23,7 +25,7 @@ def test_align_costs(string1, string2, cost):
    ('t', 'catsie', [2]),
])
def test_align_i2j(string1, string2, i2j):
-    output_cost, output_i2j, j2i, matrix = align(string1, string2)
+    output_cost, output_i2j, j2i, matrix = levenshtein_align(string1, string2)
    assert list(output_i2j) == i2j

@@ -35,25 +37,54 @@ def test_align_i2j(string1, string2, i2j):
    ('t', 'catsie', [-1, -1, 0, -1, -1, -1]),
])
def test_align_i2j(string1, string2, j2i):
-    output_cost, output_i2j, output_j2i, matrix = align(string1, string2)
+    output_cost, output_i2j, output_j2i, matrix = levenshtein_align(string1, string2)
    assert list(output_j2i) == j2i


def test_align_strings():
    words1 = ['hello', 'this', 'is', 'test!']
    words2 = ['hellothis', 'is', 'test', '!']
-    cost, i2j, j2i, matrix = align(words1, words2)
+    cost, i2j, j2i, matrix = levenshtein_align(words1, words2)
    assert cost == 4
    assert list(i2j) == [-1, -1, 1, -1]
    assert list(j2i) == [-1, 2, -1, -1]


+def test_levenshtein_align_is_symmetric():
+    words1 = ['a', 'bc', 'd']
+    words2 = ['a', 'b', 'c', 'd']
+    i_lengths = [len(w) for w in words1]
+    j_lengths = [len(w) for w in words2]
+    cost, i2j, j2i, matrix = levenshtein_align(words1, words2)
+    new_cost, new_j2i, new_i2j, new_matrix = levenshtein_align(words2, words1)
+    assert cost == new_cost
+    assert list(i2j) == list(new_i2j)
+    assert list(j2i) == list(new_j2i)
+    assert_array_equal(matrix, new_matrix.T)
+
+
+def test_get_many_to_one_undersegment():
+    words1 = ['a', 'bc', 'd']
+    words2 = ['a', 'b', 'c', 'd']
+    i_lengths = [len(w) for w in words1]
+    j_lengths = [len(w) for w in words2]
+    cost, i2j, j2i, matrix = levenshtein_align(words1, words2)
+    i2j_miss = _get_regions(i2j, i_lengths)
+    j2i_miss = _get_regions(j2i, j_lengths)
+    i2j_many2one = _get_many2one(i2j_miss, j2i_miss, i_lengths, j_lengths)
+    assert i2j_many2one == {}
+    j2i_many2one = _get_many2one(j2i_miss, i2j_miss, j_lengths, i_lengths)
+    assert j2i_many2one == {1: 1, 2: 1}
+
+
def test_align_many_to_one():
    words1 = ['a', 'b', 'c', 'd', 'e', 'f', 'g', 'h']
    words2 = ['ab', 'bc', 'e', 'fg', 'h']
-    cost, i2j, j2i, matrix = align(words1, words2)
+    cost, i2j, j2i, matrix = levenshtein_align(words1, words2)
    assert list(i2j) == [-1, -1, -1, -1, 2, -1, -1, 4]
    lengths1 = [len(w) for w in words1]
    lengths2 = [len(w) for w in words2]
-    i2j_multi, j2i_multi = multi_align(i2j, j2i, lengths1, lengths2)
+    i2j_miss = _get_regions(i2j, lengths1)
+    j2i_miss = _get_regions(j2i, lengths2)
+    i2j_multi = _get_many2one(i2j_miss, j2i_miss, lengths1, lengths2)
    assert i2j_multi[0] == 0
    assert i2j_multi[1] == 0
    assert i2j_multi[2] == 1

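`_get_regions` and `_get_many2one` are internal helpers that do not appear in this diff. The mapping asserted in `test_get_many_to_one_undersegment` ({1: 1, 2: 1}: 'b' and 'c' both belong to 'bc') can be illustrated with a hypothetical stand-in that works directly from token lengths; it only shows the expected result, not the real implementation.

def many_to_one_by_chars(a_lengths, b_lengths):
    # Hypothetical helper (not spaCy's _get_many2one): map each token on side A
    # that is a strict fragment of a token on side B to that B token's index.
    a_starts, pos = [], 0
    for length in a_lengths:
        a_starts.append(pos)
        pos += length
    b_bounds, pos = [], 0
    for length in b_lengths:
        b_bounds.append((pos, pos + length))
        pos += length
    mapping = {}
    for i, (start, length) in enumerate(zip(a_starts, a_lengths)):
        end = start + length
        for j, (b_start, b_end) in enumerate(b_bounds):
            # strict fragment: contained in a longer B token, not an exact match
            if b_start <= start and end <= b_end and (b_start, b_end) != (start, end):
                mapping[i] = j
                break
    return mapping

# Lengths of ['a', 'b', 'c', 'd'] against ['a', 'bc', 'd'] and vice versa.
assert many_to_one_by_chars([1, 1, 1, 1], [1, 2, 1]) == {1: 1, 2: 1}
assert many_to_one_by_chars([1, 2, 1], [1, 1, 1, 1]) == {}
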
@@ -62,18 +93,29 @@ def test_align_many_to_one():
    assert i2j_multi[5] == 3
    assert i2j_multi[6] == 3

-    assert j2i_multi[0] == 1
-    assert j2i_multi[1] == 3


-def test_align_one_to_many():
+def test_alignment_class_oversegment():
+    words1 = ['a', 'b', 'c', 'd']
+    words2 = ['a', 'bc', 'd']
+    A = Alignment(words1, words2)
+    B = Alignment(words2, words1)
+    assert A._y2t == [0, (1, 0), (1, 1), 2]
+    assert A._t2y == [0, [1, 2], 3]
+
+
+def test_alignment_class_undersegment():
    words1 = ['a', 'bc', 'd']
    words2 = ['a', 'b', 'c', 'd']
-    cost, i2j, j2i, matrix = align(words1, words2)
-    assert list(i2j) == [0, -1, 3]
-    lengths1 = [len(w) for w in words1]
-    lengths2 = [len(w) for w in words2]
-    multi_j2i, multi_i2j = multi_align(j2i, i2j, lengths2, lengths1)
-    unsegmented = {}
-    for j, i in multi_j2i.items():
-        unsegmented.setdefault(i, []).append(j)
-    assert unsegmented[1] == [1, 2]
+    A = Alignment(words1, words2)
+    assert A._y2t == [0, [1, 2], 3]
+    assert A._t2y == [0, (1, 0), (1, 1), 2]


+@pytest.mark.parametrize('fused,flat', [
+    ([[(0, 1), 1], 1], [1, 2, 2]),
+    ([1, 1, [1, 3], 1], [1, 1, 1, 4, 1])
+])
+def test_flatten_fused_heads(fused, flat):
+    assert Alignment.flatten(fused) == flat

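Judging by the values asserted in the two Alignment tests above, `_y2t` maps each token of the first tokenization onto the second and `_t2y` the reverse, using an int for an exact one-to-one match, an `(index, part)` tuple when a token is a fragment of a longer token on the other side, and a list of indices when a token spans several. A minimal sketch reproducing those values, assuming both tokenizations concatenate to the same text (hypothetical, not the `_align` module's code):

def token_spans(words):
    # (start, end) character offsets of each token in the concatenated text
    spans, pos = [], 0
    for word in words:
        spans.append((pos, pos + len(word)))
        pos += len(word)
    return spans

def align_tokens(a_words, b_words):
    # For each a token: an int for an exact match, a (j, part) tuple when it is
    # a fragment of b token j, or a list of b indices when it spans several.
    assert ''.join(a_words) == ''.join(b_words)
    a_spans, b_spans = token_spans(a_words), token_spans(b_words)
    fragment_counts = {}
    result = []
    for start, end in a_spans:
        overlaps = [j for j, (s, e) in enumerate(b_spans) if s < end and e > start]
        if len(overlaps) > 1:
            result.append(overlaps)
        elif b_spans[overlaps[0]] == (start, end):
            result.append(overlaps[0])
        else:
            j = overlaps[0]
            part = fragment_counts.get(j, 0)
            result.append((j, part))
            fragment_counts[j] = part + 1
    return result

words1, words2 = ['a', 'b', 'c', 'd'], ['a', 'bc', 'd']
assert align_tokens(words1, words2) == [0, (1, 0), (1, 1), 2]   # like A._y2t above
assert align_tokens(words2, words1) == [0, [1, 2], 3]           # like A._t2y above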