# spaCy/tests/tokenizer/test_emoticons.py
from __future__ import unicode_literals

import pytest

from spacy.en import English

@pytest.fixture
def EN():
    """Provide a shared English pipeline instance for the tests in this module.

    Building ``English()`` loads the full pipeline, so tests take it as a
    fixture argument rather than constructing it themselves.
    """
    return English()
def test_tweebo_challenge(EN):
    """Tokenize a string of emoticons and check each comes out as one token.

    The input mixes symbolic emoticons (":o", ">.<", "@_@"), letter-based
    ones ("XD", "8D") and trailing punctuation ("....").  Each must survive
    tokenization intact rather than being split into its component characters.
    """
    text = u""":o :/ :'( >:o (: :) >.< XD -__- o.O ;D :-) @_@ :P 8D :1 >:( :D =| ") :> ...."""
    tokens = EN(text, parse=False, tag=False)
    assert tokens[0].orth_ == ":o"
    assert tokens[1].orth_ == ":/"
    assert tokens[2].orth_ == ":'("
    assert tokens[3].orth_ == ">:o"
    assert tokens[4].orth_ == "(:"
    assert tokens[5].orth_ == ":)"
    assert tokens[6].orth_ == ">.<"
    assert tokens[7].orth_ == "XD"
    assert tokens[8].orth_ == "-__-"
    assert tokens[9].orth_ == "o.O"
    assert tokens[10].orth_ == ";D"
    assert tokens[11].orth_ == ":-)"
    assert tokens[12].orth_ == "@_@"
    assert tokens[13].orth_ == ":P"
    assert tokens[14].orth_ == "8D"
    assert tokens[15].orth_ == ":1"
    assert tokens[16].orth_ == ">:("
    assert tokens[17].orth_ == ":D"
    assert tokens[18].orth_ == "=|"
    assert tokens[19].orth_ == '")'
    assert tokens[20].orth_ == ':>'
    assert tokens[21].orth_ == '....'
def test_false_positive(EN):
    """An emoticon glued onto a word must not swallow the word.

    "example:)" should come apart into three tokens rather than being
    matched as a single emoticon-like unit.
    """
    doc = EN("example:)", parse=False, tag=False)
    assert len(doc) == 3