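# Tests for attribute-name normalization via intify_attrs (spacy.attrs) and for
# the lexeme-level helpers in spacy.lang.lex_attrs (is_stop, is_punct, is_ascii,
# is_currency, like_url, word_shape).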
import pytest
from spacy.attrs import IS_ALPHA, LEMMA, NORM, ORTH, intify_attrs
from spacy.lang.en.stop_words import STOP_WORDS
from spacy.lang.lex_attrs import is_ascii, is_currency, is_punct, is_stop
from spacy.lang.lex_attrs import like_url, word_shape


@pytest.mark.parametrize("word", ["the"])
@pytest.mark.issue(1889)
def test_issue1889(word):
    assert is_stop(word, STOP_WORDS) == is_stop(word.upper(), STOP_WORDS)


@pytest.mark.parametrize("text", ["dog"])
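# intify_attrs should map string attribute names (upper- or lowercase) to their
# integer IDs, interning string values through strings_map when one is given.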
def test_attrs_key(text):
    assert intify_attrs({"ORTH": text}) == {ORTH: text}
    assert intify_attrs({"NORM": text}) == {NORM: text}
    assert intify_attrs({"lemma": text}, strings_map={text: 10}) == {LEMMA: 10}


@pytest.mark.parametrize("text", ["dog"])
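# Running intify_attrs over an already-intified dict should be a no-op.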
def test_attrs_idempotence(text):
    int_attrs = intify_attrs({"lemma": text, "is_alpha": True}, strings_map={text: 10})
    assert intify_attrs(int_attrs) == {LEMMA: 10, IS_ALPHA: True}


@pytest.mark.parametrize("text", ["dog"])
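# "F" is a legacy alias for ORTH that intify_attrs resolves when it is called
# with _do_deprecated=True.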
def test_attrs_do_deprecated(text):
    int_attrs = intify_attrs(
        {"F": text, "is_alpha": True}, strings_map={text: 10}, _do_deprecated=True
    )
    assert int_attrs == {ORTH: 10, IS_ALPHA: True}


@pytest.mark.parametrize("text,match", [(",", True), (" ", False), ("a", False)])
def test_lex_attrs_is_punct(text, match):
    assert is_punct(text) == match


@pytest.mark.parametrize("text,match", [(",", True), ("£", False), ("♥", False)])
def test_lex_attrs_is_ascii(text, match):
    assert is_ascii(text) == match


@pytest.mark.parametrize(
    "text,match",
    [
        ("$", True),
        ("£", True),
        ("♥", False),
        ("€", True),
        ("¥", True),
        ("¢", True),
        ("a", False),
        ("www.google.com", False),
        ("dog", False),
    ],
)
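# is_currency should accept common currency symbols and reject letters, plain
# words and unrelated symbols.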
def test_lex_attrs_is_currency(text, match):
    assert is_currency(text) == match


@pytest.mark.parametrize(
    "text,match",
    [
        ("www.google.com", True),
        ("google.com", True),
        ("sydney.com", True),
        ("1abc2def.org", True),
        ("http://stupid", True),
        ("www.hi", True),
        ("example.com/example", True),
        ("dog", False),
        ("1.2", False),
        ("1.a", False),
        ("hello.There", False),
    ],
)
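# like_url is a heuristic: bare domains, www-prefixed hosts and paths count as
# URL-like, while plain words, decimal numbers and "hello.There"-style strings
# do not.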
def test_lex_attrs_like_url(text, match):
    assert like_url(text) == match


@pytest.mark.parametrize(
    "text,shape",
    [
        ("Nasa", "Xxxx"),
        ("capitalized", "xxxx"),
        ("999999999", "dddd"),
        ("C3P0", "XdXd"),
        (",", ","),
        ("\n", "\n"),
        ("``,-", "``,-"),
    ],
)
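# word_shape maps uppercase letters to "X", lowercase letters to "x" and digits
# to "d", keeps other characters as-is, and caps runs of the same character
# class at four (e.g. "999999999" -> "dddd").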
def test_lex_attrs_word_shape(text, shape):
    assert word_shape(text) == shape