Temporarily put back the tokens_from_list method, as the tests haven't been updated yet.

Matthew Honnibal 2016-11-04 19:18:07 +01:00
parent 586206c7b8
commit a36353df47


@@ -107,10 +107,11 @@ cdef class Tokenizer:
         return (self.__class__, args, None, None)
 
     cpdef Doc tokens_from_list(self, list strings):
-        raise NotImplementedError(
-            "Method deprecated in 1.0.\n"
-            "Old: tokenizer.tokens_from_list(strings)\n"
-            "New: Doc(tokenizer.vocab, words=strings)")
+        return Doc(self.vocab, words=strings)
+        #raise NotImplementedError(
+        #    "Method deprecated in 1.0.\n"
+        #    "Old: tokenizer.tokens_from_list(strings)\n"
+        #    "New: Doc(tokenizer.vocab, words=strings)")
 
     @cython.boundscheck(False)
     def __call__(self, unicode string):
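For call sites that still need updating, here is a minimal sketch of the two equivalent call styles this change keeps working side by side. It assumes a spaCy 1.x install with the English model available; the nlp handle, the sample words, and the model name 'en' are illustrative and not part of the commit.

    import spacy
    from spacy.tokens import Doc

    nlp = spacy.load('en')              # assumption: English model is installed
    words = [u'Hello', u'world', u'!']

    # Deprecated call that this commit temporarily restores:
    doc_old = nlp.tokenizer.tokens_from_list(words)

    # Replacement recommended by the deprecation message:
    doc_new = Doc(nlp.vocab, words=words)

    # Both paths should yield the same pre-tokenized Doc.
    assert [t.orth_ for t in doc_old] == [t.orth_ for t in doc_new]

Once the test suite switches to the Doc(vocab, words=...) form, the restored method can be removed again, as the commented-out deprecation message already points callers at the replacement.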