2014-11-02 05:22:14 +03:00
|
|
|
from __future__ import unicode_literals
|
2017-01-04 02:47:59 +03:00
|
|
|
|
2014-12-21 12:43:27 +03:00
|
|
|
import pytest
|
2014-11-02 05:22:14 +03:00
|
|
|
|
|
|
|
|
2017-01-04 02:47:59 +03:00
|
|
|
def test_tokenizer_handles_emoticons(en_tokenizer):
    """Each emoticon in the input should survive tokenization as one token.

    The sample covers the Tweebo challenge (CMU) emoticon inventory.
    """
    # Tweebo challenge (CMU)
    text = u""":o :/ :'( >:o (: :) >.< XD -__- o.O ;D :-) @_@ :P 8D :1 >:( :D =| ") :> ...."""
    # One expected surface form per whitespace-separated emoticon above.
    expected = [
        ":o", ":/", ":'(", ">:o", "(:", ":)", ">.<", "XD", "-__-",
        "o.O", ";D", ":-)", "@_@", ":P", "8D", ":1", ">:(", ":D",
        "=|", '")', ":>", "....",
    ]
    tokens = en_tokenizer(text)
    for position, surface in enumerate(expected):
        assert tokens[position].orth_ == surface
|
2014-12-09 08:08:17 +03:00
|
|
|
|
|
|
|
|
2017-01-04 02:47:59 +03:00
|
|
|
@pytest.mark.parametrize('text,length', [("example:)", 3), ("108)", 2), ("XDN", 1)])
def test_tokenizer_excludes_false_pos_emoticons(en_tokenizer, text, length):
    """Strings that merely contain emoticon-like substrings must still be
    split into the expected number of tokens, not kept whole."""
    doc = en_tokenizer(text)
    assert len(doc) == length
|