Remove length cap on sentences

Matthew Honnibal 2017-05-27 08:20:32 -05:00
parent 73a643d32a
commit de13fe0305


@@ -70,12 +70,12 @@ def train(_, lang, output_dir, train_data, dev_data, n_iter=20, n_sents=0,
     optimizer = nlp.begin_training(lambda: corpus.train_tuples, use_gpu=use_gpu)
-    print("Itn.\tDep. Loss\tUAS\tNER P.\tNER R.\tNER F.\tTag %\tToken %")
+    print("Itn.\tLoss\tUAS\tNER P.\tNER R.\tNER F.\tTag %\tToken %")
     try:
         for i in range(n_iter):
             with tqdm.tqdm(total=corpus.count_train(), leave=False) as pbar:
                 train_docs = corpus.train_docs(nlp, projectivize=True,
-                                               gold_preproc=False, max_length=1000)
+                                               gold_preproc=False, max_length=0)
                 losses = {}
                 for batch in minibatch(train_docs, size=batch_sizes):
                     docs, golds = zip(*batch)
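
For context, the change relies on max_length=0 being treated as "no limit" when corpus.train_docs selects training sentences. Below is a minimal sketch of that filtering semantics under that assumption; filter_by_length is a hypothetical helper, not spaCy's actual corpus.train_docs implementation.

from typing import Any, Iterable, Iterator, Tuple

def filter_by_length(examples: Iterable[Tuple[Any, Any]],
                     max_length: int = 0) -> Iterator[Tuple[Any, Any]]:
    # Assumed semantics: a positive max_length drops (doc, gold) pairs whose
    # doc reaches that many tokens, while max_length == 0 disables the cap
    # and yields every example. This mirrors the intent of the diff above,
    # not the real spaCy API.
    for doc, gold in examples:
        if max_length == 0 or len(doc) < max_length:
            yield doc, gold

Under that assumption, the old setting silently skipped very long sentences during training, while max_length=0 trains on the full corpus.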