Mirror of https://github.com/explosion/spaCy.git (synced 2025-07-11 08:42:28 +03:00)
Remove old and/or redundant tests
This commit is contained in:
parent 19c4132097
commit ffcaba9017
setup.py
@@ -40,7 +40,6 @@ PACKAGES = [
     'spacy.tests.morphology',
     'spacy.tests.munge',
     'spacy.tests.parser',
-    'spacy.tests.print',
     'spacy.tests.serialize',
     'spacy.tests.spans',
     'spacy.tests.tagger',
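The PACKAGES list above is maintained by hand, so every removed test package also needs a matching edit in setup.py. As a point of comparison only, and not part of this commit or of spaCy's actual setup.py, a minimal sketch of letting setuptools discover packages instead:

# Hypothetical, simplified setup.py; find_packages() walks the source tree
# and returns every package that has an __init__.py, so deleting a test
# directory would not require editing a hand-written list.
from setuptools import find_packages, setup

setup(
    name='example-project',      # placeholder metadata, not spaCy's
    packages=find_packages(),
)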
@@ -1,33 +0,0 @@
-"""Test the Token.conjuncts property"""
-from __future__ import unicode_literals
-
-import pytest
-
-
-def orths(tokens):
-    return [t.orth_ for t in tokens]
-
-
-#def test_simple_two(EN):
-#    tokens = EN('I lost money and pride.', tag=True, parse=True)
-#    pride = tokens[4]
-#    assert orths(pride.conjuncts) == ['money', 'pride']
-#    money = tokens[2]
-#    assert orths(money.conjuncts) == ['money', 'pride']
-
-
-#def test_comma_three(EN):
-#    tokens = EN('I found my wallet, phone and keys.')
-#    keys = tokens[-2]
-#    assert orths(keys.conjuncts) == ['wallet', 'phone', 'keys']
-#    wallet = tokens[3]
-#    assert orths(wallet.conjuncts) == ['wallet', 'phone', 'keys']
-
-
-# This is failing due to parse errors
-#def test_and_three():
-#    tokens = NLU('I found my wallet and phone and keys.')
-#    keys = tokens[-2]
-#    assert orths(keys.conjuncts) == ['wallet', 'phone', 'keys']
-#    wallet = tokens[3]
-#    assert orths(wallet.conjuncts) == ['wallet', 'phone', 'keys']
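The deleted file above only contained commented-out checks of the Token.conjuncts property. For reference, a minimal sketch of the same expectation as an active test, assuming the EN fixture from the deleted file (a loaded English pipeline) and the project's models marker; the expected output is copied from the commented-out test, not re-verified:

import pytest


@pytest.mark.models  # parser-dependent, like the original checks
def test_conjuncts_sketch(EN):
    # Coordinated nouns should report the full coordination as conjuncts,
    # per the expectation in the deleted, commented-out test.
    tokens = EN('I lost money and pride.', tag=True, parse=True)
    money = tokens[2]
    assert [t.orth_ for t in money.conjuncts] == ['money', 'pride']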
@@ -1,98 +0,0 @@
-# -*- coding: utf-8 -*-
-import pytest
-
-
-def test_print_doc(EN):
-    try:
-        doc = EN(u'I sat down for coffee at the coffee store')
-        print(doc)
-    except Exception:
-        pytest.fail("Printing failed")
-
-
-def test_repr_doc(EN):
-    try:
-        doc = EN(u'I sat down for coffee at the coffee store')
-        print(repr(doc))
-    except Exception:
-        pytest.fail("Printing failed")
-
-
-def test_print_doc_unicode(EN):
-    try:
-        doc = EN(u'I sat down for coffee at the café')
-        print(doc)
-    except Exception:
-        pytest.fail("Printing failed")
-
-
-def test_repr_doc_unicode(EN):
-    try:
-        doc = EN(u'I sat down for coffee at the café')
-        print(repr(doc))
-    except Exception:
-        pytest.fail("Printing failed")
-
-
-def test_print_span(EN):
-    try:
-        span = EN(u'I sat down for coffee at the coffee store')[-3:]
-        print(span)
-    except Exception:
-        pytest.fail("Printing failed")
-
-
-def test_repr_span(EN):
-    try:
-        span = EN(u'I sat down for coffee at the coffee store')[-3:]
-        print(repr(span))
-    except Exception:
-        pytest.fail("Printing failed")
-
-
-def test_print_span_unicode(EN):
-    try:
-        span = EN(u'I sat down for coffee at the café')[-3:]
-        print(span)
-    except Exception:
-        pytest.fail("Printing failed")
-
-
-def test_repr_span_unicode(EN):
-    try:
-        span = EN(u'I sat down for coffee at the café')[-3:]
-        print(repr(span))
-    except Exception:
-        pytest.fail("Printing failed")
-
-
-def test_print_token(EN):
-    try:
-        token = EN(u'I sat down for coffee at the coffee store')[-1]
-        print(token)
-    except Exception:
-        pytest.fail("Printing failed")
-
-
-def test_repr_token(EN):
-    try:
-        token = EN(u'I sat down for coffee at the coffee store')[-1]
-        print(repr(token))
-    except Exception:
-        pytest.fail("Printing failed")
-
-
-def test_print_token_unicode(EN):
-    try:
-        token = EN(u'I sat down for coffee at the café')[-1]
-        print(token)
-    except Exception:
-        pytest.fail("Printing failed")
-
-
-def test_repr_token_unicode(EN):
-    try:
-        token = EN(u'I sat down for coffee at the café')[-1]
-        print(repr(token))
-    except Exception:
-        pytest.fail("Printing failed")
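The 98-line file above repeats one pattern twelve times: printing and repr-ing a Doc, a Span, and a Token, on both ASCII and non-ASCII text, must not raise. A condensed sketch of the same coverage with pytest.mark.parametrize, assuming the same EN fixture; this is an illustration, not part of the commit:

import pytest


@pytest.mark.parametrize('text', [
    u'I sat down for coffee at the coffee store',
    u'I sat down for coffee at the café',
])
def test_print_and_repr(EN, text):
    doc = EN(text)
    for obj in (doc, doc[-3:], doc[-1]):  # Doc, Span, Token
        print(obj)         # exercises __str__; an exception fails the test
        print(repr(obj))   # exercises __repr__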
@@ -1,33 +0,0 @@
-import cloudpickle
-import io
-import os
-import pickle
-import pytest
-import tempfile
-
-try:
-    unicode
-except NameError:
-    unicode = str
-
-# These cause the addition of temp files, that are then not deleted
-#@pytest.mark.models
-#def test_pickle_english(EN):
-#    file_ = io.BytesIO()
-#    cloudpickle.dump(EN, file_)
-#
-#    file_.seek(0)
-#
-#    loaded = pickle.load(file_)
-#    assert loaded is not None
-#
-#@pytest.mark.models
-#def test_cloudpickle_to_file(EN):
-#    f = tempfile.NamedTemporaryFile(delete=False)
-#    p = cloudpickle.CloudPickler(f)
-#    p.dump(EN)
-#    f.close()
-#    loaded_en = cloudpickle.load(open(f.name, 'rb'))
-#    os.unlink(f.name)
-#    doc = loaded_en(unicode('test parse'))
-#    assert len(doc) == 2
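The removed pickle tests were disabled because the file-based variant left temporary files behind. A minimal sketch of the same round trip done entirely in memory, so nothing is written to disk; it assumes, as the deleted test did, that the EN pipeline object can be serialized with cloudpickle:

import io
import pickle

import cloudpickle
import pytest


@pytest.mark.models  # model-dependent, marker as used in the deleted file
def test_pickle_roundtrip_in_memory(EN):
    # Serialize into an in-memory buffer instead of a NamedTemporaryFile,
    # so no temp file is created and no cleanup is needed afterwards.
    buf = io.BytesIO()
    cloudpickle.dump(EN, buf)
    buf.seek(0)
    loaded_en = pickle.load(buf)
    doc = loaded_en(u'test parse')
    assert len(doc) == 2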