Fix parser batch-size bug introduced during cleanup

Matthew Honnibal 2017-08-06 14:10:48 +02:00
parent 0eec7c9e9b
commit bfffdeabb2


@@ -339,12 +339,10 @@ cdef class Parser:
            The number of threads with which to work on the buffer in parallel.
        Yields (Doc): Documents, in order.
        """
        cdef StateClass parse_state
        cdef Doc doc
        queue = []
        for docs in cytoolz.partition_all(batch_size, docs):
            docs = list(docs)
            tokvecs = [d.tensor for d in docs]
            tokvecs = [doc.tensor for doc in docs]
            if beam_width == 1:
                parse_states = self.parse_batch(docs, tokvecs)
            else:
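
Note: the hunk above batches the incoming document stream with cytoolz.partition_all and rebuilds the per-batch tensors inside the loop. A minimal standalone sketch of that batching behaviour, with plain strings standing in for Doc objects (illustration only, not part of the commit):

    import cytoolz

    docs = ["doc%d" % i for i in range(7)]      # stand-ins for Doc objects
    batch_size = 3

    for batch in cytoolz.partition_all(batch_size, docs):
        batch = list(batch)                     # partition_all yields tuples
        tensors = [d.upper() for d in batch]    # stands in for [doc.tensor for doc in docs]
        print(len(batch), tensors)
    # prints batches of 3, 3, and 1 items; the final batch just holds the remainder
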
@@ -364,6 +362,8 @@ cdef class Parser:
            int nr_class, nr_feat, nr_piece, nr_dim, nr_state
        if isinstance(docs, Doc):
            docs = [docs]
        if isinstance(tokvecses, np.ndarray):
            tokvecses = [tokvecses]
        tokvecs = self.model[0].ops.flatten(tokvecses)
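
Note: the second hunk wraps a single Doc or a single token-vector array in a one-element list before flattening, so the downstream code can always assume sequences. A minimal sketch of the same pattern, assuming a hypothetical normalise_inputs helper rather than spaCy's actual API:

    import numpy as np

    def normalise_inputs(docs, tokvecses):
        # Wrap a lone document or a lone token-vector array in a list,
        # mirroring the isinstance checks in the hunk above.
        if not isinstance(docs, (list, tuple)):
            docs = [docs]
        if isinstance(tokvecses, np.ndarray):
            tokvecses = [tokvecses]
        return docs, tokvecses

    docs, tokvecses = normalise_inputs("a single doc", np.zeros((5, 64)))
    print(len(docs), len(tokvecses), tokvecses[0].shape)   # 1 1 (5, 64)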