mirror of
				https://github.com/explosion/spaCy.git
				synced 2025-11-04 09:57:26 +03:00 
			
		
		
		
	## Description Related issues: #2379 (should be fixed by separating model tests) * **total execution time down from > 300 seconds to under 60 seconds** 🎉 * removed all model-specific tests that could only really be run manually anyway – those will now live in a separate test suite in the [`spacy-models`](https://github.com/explosion/spacy-models) repository and are already integrated into our new model training infrastructure * changed all relative imports to absolute imports to prepare for moving the test suite from `/spacy/tests` to `/tests` (it'll now always test against the installed version) * merged old regression tests into collections, e.g. `test_issue1001-1500.py` (about 90% of the regression tests are very short anyway) * tidied up and rewrote existing tests wherever possible ### Todo - [ ] move tests to `/tests` and adjust CI commands accordingly - [x] move model test suite from internal repo to `spacy-models` - [x] ~~investigate why `pipeline/test_textcat.py` is flaky~~ - [x] review old regression tests (leftover files) and see if they can be merged, simplified or deleted - [ ] update documentation on how to run tests ### Types of change enhancement, tests ## Checklist <!--- Before you submit the PR, go over this checklist and make sure you can tick off all the boxes. [] -> [x] --> - [x] I have submitted the spaCy Contributor Agreement. - [x] I ran the tests, and all new and existing tests passed. - [ ] My changes don't require a change to the documentation, or if they do, I've added all required information.
		
			
				
	
	
		
			41 lines
		
	
	
		
			1.3 KiB
		
	
	
	
		
			Python
		
	
	
	
	
	
			
		
		
	
	
			41 lines
		
	
	
		
			1.3 KiB
		
	
	
	
		
			Python
		
	
	
	
	
	
# coding: utf-8
 | 
						|
from __future__ import unicode_literals
 | 
						|
 | 
						|
import pytest
 | 
						|
import dill as pickle
 | 
						|
import numpy
 | 
						|
from spacy.strings import StringStore
 | 
						|
from spacy.vocab import Vocab
 | 
						|
from spacy.attrs import NORM
 | 
						|
 | 
						|
 | 
						|
@pytest.mark.parametrize('text1,text2', [('hello', 'bye')])
def test_pickle_string_store(text1, text2):
    """A StringStore should round-trip through pickle with its hashes and
    size intact."""
    store = StringStore()
    hash1 = store[text1]
    hash2 = store[text2]
    # dump/reload in one step; protocol=-1 selects the highest protocol
    restored = pickle.loads(pickle.dumps(store, protocol=-1))
    # the restored store must map the same strings to the same hashes
    assert restored[text1] == hash1
    assert restored[text2] == hash2
    assert len(restored) == len(store)
 | 
						|
 | 
						|
 | 
						|
@pytest.mark.parametrize('text1,text2', [('dog', 'cat')])
def test_pickle_vocab(text1, text2):
    """A Vocab should round-trip through pickle, preserving lexeme attributes
    (orth, norm) and its vectors table."""
    # NORM getter strips the last character, giving each lexeme a
    # predictable norm we can assert on below.
    vocab = Vocab(lex_attr_getters={int(NORM): lambda string: string[:-1]})
    # Fix: set the vector on the parametrized string instead of a hard-coded
    # 'dog', so any future parameter tuple still exercises the vector
    # round-trip (identical behavior for the current ('dog', 'cat') case).
    vocab.set_vector(text1, numpy.ones((5,), dtype='f'))
    lex1 = vocab[text1]
    lex2 = vocab[text2]
    assert lex1.norm_ == text1[:-1]
    assert lex2.norm_ == text2[:-1]
    data = pickle.dumps(vocab)
    unpickled = pickle.loads(data)
    # lexeme identity survives the round-trip
    assert unpickled[text1].orth == lex1.orth
    assert unpickled[text2].orth == lex2.orth
    # custom lex_attr_getters results survive too
    assert unpickled[text1].norm == lex1.norm
    assert unpickled[text2].norm == lex2.norm
    assert unpickled[text1].norm != unpickled[text2].norm
    assert unpickled.vectors is not None
    # original vocab still holds the vector we assigned above
    assert list(vocab[text1].vector) == [1., 1., 1., 1., 1.]
 |