//- 💫 DOCS > USAGE > WHAT'S NEW IN V2.0 > BACKWARDS INCOMPATIBILITIES

+table(["Old", "New"])
    +row
        +cell
            |  #[code spacy.en] etc.
        +cell
            |  #[code spacy.lang.en] etc.

    +row
        +cell #[code spacy.orth]
        +cell #[code spacy.lang.xx.lex_attrs]

    +row
        +cell #[code spacy.syntax.iterators]
        +cell #[code spacy.lang.xx.syntax_iterators]

    +row
        +cell #[code spacy.tagger.Tagger]
        +cell #[code spacy.pipeline.Tagger]

    +row
        +cell #[code spacy.cli.model]
        +cell #[+api("cli#vocab") #[code spacy.cli.vocab]]

    +row
        +cell #[code Language.save_to_directory]
        +cell #[+api("language#to_disk") #[code Language.to_disk]]

    +row
        +cell #[code Language.end_training]
        +cell #[+api("language#begin_training") #[code Language.begin_training]]

    +row
        +cell #[code Language.create_make_doc]
        +cell #[+api("language#attributes") #[code Language.tokenizer]]

    +row
        +cell
            |  #[code Vocab.load]
            |  #[code Vocab.load_lexemes]
        +cell
            |  #[+api("vocab#from_disk") #[code Vocab.from_disk]]
            |  #[+api("vocab#from_bytes") #[code Vocab.from_bytes]]

    +row
        +cell
            |  #[code Vocab.dump]
        +cell
            |  #[+api("vocab#to_disk") #[code Vocab.to_disk]]
            |  #[+api("vocab#to_bytes") #[code Vocab.to_bytes]]

    +row
        +cell
            |  #[code Vocab.load_vectors]
            |  #[code Vocab.load_vectors_from_bin_loc]
        +cell
            |  #[+api("vectors#from_disk") #[code Vectors.from_disk]]
            |  #[+api("vectors#from_bytes") #[code Vectors.from_bytes]]
            |  #[+api("vectors#from_glove") #[code Vectors.from_glove]]

    +row
        +cell
            |  #[code Vocab.dump_vectors]
        +cell
            |  #[+api("vectors#to_disk") #[code Vectors.to_disk]]
            |  #[+api("vectors#to_bytes") #[code Vectors.to_bytes]]

    +row
        +cell
            |  #[code StringStore.load]
        +cell
            |  #[+api("stringstore#from_disk") #[code StringStore.from_disk]]
            |  #[+api("stringstore#from_bytes") #[code StringStore.from_bytes]]

    +row
        +cell
            |  #[code StringStore.dump]
        +cell
            |  #[+api("stringstore#to_disk") #[code StringStore.to_disk]]
            |  #[+api("stringstore#to_bytes") #[code StringStore.to_bytes]]

    +row
        +cell #[code Tokenizer.load]
        +cell
            |  #[+api("tokenizer#from_disk") #[code Tokenizer.from_disk]]
            |  #[+api("tokenizer#from_bytes") #[code Tokenizer.from_bytes]]

    +row
        +cell #[code Tagger.load]
        +cell
            |  #[+api("tagger#from_disk") #[code Tagger.from_disk]]
            |  #[+api("tagger#from_bytes") #[code Tagger.from_bytes]]

    +row
        +cell #[code DependencyParser.load]
        +cell
            |  #[+api("dependencyparser#from_disk") #[code DependencyParser.from_disk]]
            |  #[+api("dependencyparser#from_bytes") #[code DependencyParser.from_bytes]]

    +row
        +cell #[code EntityRecognizer.load]
        +cell
            |  #[+api("entityrecognizer#from_disk") #[code EntityRecognizer.from_disk]]
            |  #[+api("entityrecognizer#from_bytes") #[code EntityRecognizer.from_bytes]]

    +row
        +cell #[code Matcher.load]
        +cell -

    +row
        +cell
            |  #[code Matcher.add_pattern]
            |  #[code Matcher.add_entity]
        +cell
            |  #[+api("matcher#add") #[code Matcher.add]]
            |  #[+api("phrasematcher#add") #[code PhraseMatcher.add]]

    +row
        +cell #[code Matcher.get_entity]
        +cell #[+api("matcher#get") #[code Matcher.get]]

    +row
        +cell #[code Matcher.has_entity]
        +cell #[+api("matcher#has_key") #[code Matcher.has_key]]

    +row
        +cell #[code Doc.read_bytes]
        +cell
            |  #[+api("doc#to_bytes") #[code Doc.to_bytes]]
            |  #[+api("doc#from_bytes") #[code Doc.from_bytes]]
            |  #[+api("doc#to_disk") #[code Doc.to_disk]]
            |  #[+api("doc#from_disk") #[code Doc.from_disk]]

    +row
        +cell #[code Token.is_ancestor_of]
        +cell #[+api("token#is_ancestor") #[code Token.is_ancestor]]

    +row
        +cell #[code Span.sent_start]
        +cell #[+api("span#is_sent_start") #[code Span.is_sent_start]]
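
p
    |  The example below is a minimal sketch of how a few of the renamed methods
    |  from the table are used in v2.0, e.g. the #[code to_disk] /
    |  #[code from_disk] serialization methods and
    |  #[+api("matcher#add") #[code Matcher.add]]. It assumes the #[code en]
    |  model is installed; the output path and the #[code 'HELLO_WORLD'] pattern
    |  key are placeholders, not part of the API.

+code.
    import spacy
    from spacy.matcher import Matcher

    nlp = spacy.load('en')

    # Vocab.dump / Vocab.load are replaced by to_disk / from_disk
    nlp.vocab.to_disk('/tmp/vocab')
    nlp.vocab.from_disk('/tmp/vocab')

    # Matcher.add_pattern / Matcher.add_entity are replaced by Matcher.add,
    # Matcher.has_entity by Matcher.has_key
    matcher = Matcher(nlp.vocab)
    matcher.add('HELLO_WORLD', None, [{'LOWER': 'hello'}, {'LOWER': 'world'}])
    assert matcher.has_key('HELLO_WORLD')

    doc = nlp(u'Hello world!')
    matches = matcher(doc)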