mirror of https://github.com/explosion/spaCy.git
"""Test suspected freeing of strings"""
from __future__ import unicode_literals


def test_one(en_tokenizer):
    # Tokenize a sentence and check that the first token's text is intact.
    tokens = en_tokenizer('Betty Botter bought a pound of butter.')
    assert tokens[0].orth_ == 'Betty'
    # Tokenize a second sentence that reuses the same word; if the string
    # store had freed 'Betty' after the first doc, this lookup would fail.
    tokens2 = en_tokenizer('Betty also bought a pound of butter.')
    assert tokens2[0].orth_ == 'Betty'
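
The en_tokenizer argument above is a pytest fixture supplied by spaCy's test conftest.py; it is not defined in this file. As a rough sketch only, assuming a modern spaCy install where spacy.blank is available, a stand-alone conftest.py for running this test in isolation might look like the following:

import pytest
import spacy


@pytest.fixture
def en_tokenizer():
    # Hypothetical stand-in for spaCy's own fixture: a blank English
    # pipeline has no trained components, just the rule-based tokenizer,
    # which is all this test needs.
    return spacy.blank("en").tokenizer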