Mirror of https://github.com/explosion/spaCy.git
Merge pull request #5473 from explosion/fix/travis-tests

Fix Python 2.7 compat

Commit 1f572ce89b
@@ -169,21 +169,16 @@ class ChineseTokenizer(DummyTokenizer):
         return util.to_bytes(serializers, [])
 
     def from_bytes(self, data, **kwargs):
-        pkuseg_features_b = b""
-        pkuseg_weights_b = b""
-        pkuseg_processors_data = None
+        pkuseg_data = {"features_b": b"", "weights_b": b"", "processors_data": None}
 
         def deserialize_pkuseg_features(b):
-            nonlocal pkuseg_features_b
-            pkuseg_features_b = b
+            pkuseg_data["features_b"] = b
 
         def deserialize_pkuseg_weights(b):
-            nonlocal pkuseg_weights_b
-            pkuseg_weights_b = b
+            pkuseg_data["weights_b"] = b
 
         def deserialize_pkuseg_processors(b):
-            nonlocal pkuseg_processors_data
-            pkuseg_processors_data = srsly.msgpack_loads(b)
+            pkuseg_data["processors_data"] = srsly.msgpack_loads(b)
 
         deserializers = OrderedDict(
             (
@@ -195,13 +190,13 @@ class ChineseTokenizer(DummyTokenizer):
         )
         util.from_bytes(data, deserializers, [])
 
-        if pkuseg_features_b and pkuseg_weights_b:
+        if pkuseg_data["features_b"] and pkuseg_data["weights_b"]:
             with tempfile.TemporaryDirectory() as tempdir:
                 tempdir = Path(tempdir)
                 with open(tempdir / "features.pkl", "wb") as fileh:
-                    fileh.write(pkuseg_features_b)
+                    fileh.write(pkuseg_data["features_b"])
                 with open(tempdir / "weights.npz", "wb") as fileh:
-                    fileh.write(pkuseg_weights_b)
+                    fileh.write(pkuseg_data["weights_b"])
                 try:
                     import pkuseg
                 except ImportError:
@@ -210,13 +205,9 @@ class ChineseTokenizer(DummyTokenizer):
                         + _PKUSEG_INSTALL_MSG
                     )
                 self.pkuseg_seg = pkuseg.pkuseg(str(tempdir))
-            if pkuseg_processors_data:
-                (
-                    user_dict,
-                    do_process,
-                    common_words,
-                    other_words,
-                ) = pkuseg_processors_data
+            if pkuseg_data["processors_data"]:
+                processors_data = pkuseg_data["processors_data"]
+                (user_dict, do_process, common_words, other_words) = processors_data
                 self.pkuseg_seg.preprocesser = pkuseg.Preprocesser(user_dict)
                 self.pkuseg_seg.postprocesser.do_process = do_process
                 self.pkuseg_seg.postprocesser.common_words = set(common_words)
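The key change for Python 2.7 compatibility is the removal of `nonlocal`, which Python 2 does not support. Instead of rebinding variables in the enclosing scope, the deserializer callbacks now mutate a single dict that the enclosing scope owns, which behaves the same on Python 2 and Python 3. A minimal sketch of that pattern, with illustrative names rather than spaCy's actual code:

# Minimal sketch of the closure-to-dict pattern; names are hypothetical.
def make_loader():
    # One mutable dict replaces several would-be `nonlocal` variables.
    state = {"features_b": b"", "weights_b": b""}

    def set_features(b):
        # Item assignment mutates `state` in place; no rebinding is needed,
        # so this works on Python 2.7 as well as Python 3.
        state["features_b"] = b

    def set_weights(b):
        state["weights_b"] = b

    set_features(b"feature-bytes")
    set_weights(b"weight-bytes")
    return state

loaded = make_loader()
assert loaded["features_b"] == b"feature-bytes"
assert loaded["weights_b"] == b"weight-bytes"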
@@ -1,3 +1,4 @@
+# coding: utf8
 from __future__ import unicode_literals
 
 import pytest
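The second file only gains a `# coding: utf8` declaration. Python 2 assumes ASCII source encoding by default, so a module containing non-ASCII literals needs the declaration to parse; Python 3 already defaults to UTF-8. A minimal sketch, with a hypothetical constant for illustration:

# coding: utf8
from __future__ import unicode_literals

# Without the coding line above, Python 2.7 rejects the non-ASCII literal
# below with a SyntaxError; Python 3 reads source as UTF-8 by default.
SAMPLE_TEXT = "语言处理"  # hypothetical example string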