Limit parser model size to hopefully reduce memory usage during CI tests

Matthew Honnibal 2018-01-28 21:00:32 +01:00
parent 6d978e5c35
commit f5b1ad4100


@@ -17,7 +17,7 @@ def test_beam_parse():
     nlp = Language()
     nlp.add_pipe(DependencyParser(nlp.vocab), name='parser')
     nlp.parser.add_label('nsubj')
-    nlp.begin_training()
+    nlp.parser.begin_training([], token_vector_width=8, hidden_width=8)
     doc = nlp.make_doc(u'Australia is a country')
     nlp.parser(doc, beam_width=2)
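
For reference, a minimal sketch of the full test after this change, reconstructed from the hunk above. The two import lines are assumptions based on spaCy's 2.x module layout and are not part of the diff:

    from spacy.language import Language
    from spacy.pipeline import DependencyParser

    def test_beam_parse():
        nlp = Language()
        nlp.add_pipe(DependencyParser(nlp.vocab), name='parser')
        nlp.parser.add_label('nsubj')
        # Tiny token vector and hidden widths keep the parser model small,
        # which is the point of this commit: lower memory use on CI.
        nlp.parser.begin_training([], token_vector_width=8, hidden_width=8)
        doc = nlp.make_doc(u'Australia is a country')
        nlp.parser(doc, beam_width=2)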