# coding: utf-8
"""Test that token.idx correctly computes index into the original string."""

from __future__ import unicode_literals

import pytest

def test_simple_punct(en_tokenizer):
    """Each token's .idx is its character offset into the raw input text."""
    tokens = en_tokenizer("to walk, do foo")
    # Character offsets of "to", "walk", ",", "do", "foo" in the input.
    expected_offsets = [0, 3, 7, 9, 12]
    for token, offset in zip(tokens, expected_offsets):
        assert token.idx == offset
def test_complex_punct(en_tokenizer):
    """Offsets and lengths stay correct when punctuation is split off.

    Uses "Tom (D., Ill.)!" so the tokenizer must peel brackets, periods
    and the exclamation mark into separate tokens; a None length means
    the original test only checked the offset for that token.
    """
    tokens = en_tokenizer("Tom (D., Ill.)!")
    # (character offset, token length or None) per expected token.
    expected = [
        (0, 3),    # Tom
        (4, 1),    # (
        (5, 2),    # D.
        (7, 1),    # ,
        (9, 4),    # Ill.
        (13, None),
        (14, None),
    ]
    for token, (offset, length) in zip(tokens, expected):
        assert token.idx == offset
        if length is not None:
            assert len(token) == length