spaCy/spacy/tests/tokenizer/test_string_loading.py


"""Test suspected freeing of strings"""
from __future__ import unicode_literals
def test_one(en_tokenizer):
tokens = en_tokenizer('Betty Botter bought a pound of butter.')
assert tokens[0].orth_ == 'Betty'
tokens2 = en_tokenizer('Betty also bought a pound of butter.')
assert tokens2[0].orth_ == 'Betty'
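

# A minimal sketch (not part of the original test; the function name is
# hypothetical) illustrating the same idea with the en_tokenizer fixture:
# repeated tokenization should keep resolving token text through the shared
# string storage, so the string for 'Betty' must not have been freed in between.
def test_repeated_string_resolution(en_tokenizer):
    first = en_tokenizer('Betty Botter bought a pound of butter.')
    second = en_tokenizer('Betty also bought a pound of butter.')
    # Both Token objects should map back to the identical string value,
    # which would fail if the underlying string memory had been released.
    assert first[0].orth_ == second[0].orth_ == 'Betty'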