diff --git a/tests/test_lexeme_flags.py b/tests/test_lexeme_flags.py
index 6b9c20d11..10276d8ea 100644
--- a/tests/test_lexeme_flags.py
+++ b/tests/test_lexeme_flags.py
@@ -7,18 +7,18 @@ from spacy.lexeme import *
 
 
 def test_is_alpha():
-    the = EN.lexicon.lookup('the')
+    the = EN.lexicon['the']
     assert the['flags'] & (1 << IS_ALPHA)
-    year = EN.lexicon.lookup('1999')
+    year = EN.lexicon['1999']
     assert not year['flags'] & (1 << IS_ALPHA)
-    mixed = EN.lexicon.lookup('hello1')
+    mixed = EN.lexicon['hello1']
     assert not mixed['flags'] & (1 << IS_ALPHA)
 
 
 def test_is_digit():
-    the = EN.lexicon.lookup('the')
+    the = EN.lexicon['the']
     assert not the['flags'] & (1 << IS_DIGIT)
-    year = EN.lexicon.lookup('1999')
+    year = EN.lexicon['1999']
     assert year['flags'] & (1 << IS_DIGIT)
-    mixed = EN.lexicon.lookup('hello1')
+    mixed = EN.lexicon['hello1']
     assert not mixed['flags'] & (1 << IS_DIGIT)
diff --git a/tests/test_tokenizer.py b/tests/test_tokenizer.py
index 4624e2828..b3506a30c 100644
--- a/tests/test_tokenizer.py
+++ b/tests/test_tokenizer.py
@@ -27,17 +27,17 @@ def test_punct():
 def test_digits():
     tokens = EN.tokenize('The year: 1984.')
     assert len(tokens) == 5
-    assert tokens[0].sic == EN.lexicon.lookup('The')['sic']
-    assert tokens[3].sic == EN.lexicon.lookup('1984')['sic']
+    assert tokens[0].sic == EN.lexicon['The']['sic']
+    assert tokens[3].sic == EN.lexicon['1984']['sic']
 
 
 def test_contraction():
     tokens = EN.tokenize("don't giggle")
     assert len(tokens) == 3
-    assert tokens[1].sic == EN.lexicon.lookup("not")['sic']
+    assert tokens[1].sic == EN.lexicon["not"]['sic']
     tokens = EN.tokenize("i said don't!")
     assert len(tokens) == 5
-    assert tokens[4].sic == EN.lexicon.lookup('!')['sic']
+    assert tokens[4].sic == EN.lexicon['!']['sic']
 
 
 def test_contraction_punct():
diff --git a/tests/test_vocab.py b/tests/test_vocab.py
index 036e5981c..daaabd33d 100644
--- a/tests/test_vocab.py
+++ b/tests/test_vocab.py
@@ -4,20 +4,20 @@ from spacy.en import EN
 
 
 def test_neq():
-    addr = EN.lexicon.lookup('Hello')
-    assert EN.lexicon.lookup('bye')['sic'] != addr['sic']
+    addr = EN.lexicon['Hello']
+    assert EN.lexicon['bye']['sic'] != addr['sic']
 
 
 def test_eq():
-    addr = EN.lexicon.lookup('Hello')
-    assert EN.lexicon.lookup('Hello')['sic'] == addr['sic']
+    addr = EN.lexicon['Hello']
+    assert EN.lexicon['Hello']['sic'] == addr['sic']
 
 
 def test_case_neq():
-    addr = EN.lexicon.lookup('Hello')
-    assert EN.lexicon.lookup('hello')['sic'] != addr['sic']
+    addr = EN.lexicon['Hello']
+    assert EN.lexicon['hello']['sic'] != addr['sic']
 
 
 def test_punct_neq():
-    addr = EN.lexicon.lookup('Hello')
-    assert EN.lexicon.lookup('Hello,')['sic'] != addr['sic']
+    addr = EN.lexicon['Hello']
+    assert EN.lexicon['Hello,']['sic'] != addr['sic']