mirror of
				https://github.com/django/daphne.git
				synced 2025-10-26 13:31:00 +03:00 
			
		
		
		
	Added compatibility for hypothesis 4 (#261)
Hypothesis's `average_size` argument was already deprecated [1] and was effectively removed in Hypothesis 4 [2]. [1] https://github.com/HypothesisWorks/hypothesis/pull/1162 [2] https://hypothesis.readthedocs.io/en/latest/changes.html#v4-0-0
This commit is contained in:
		
							parent
							
								
									a3494215cf
								
							
						
					
					
						commit
						f46c2448b1
					
				
							
								
								
									
										2
									
								
								setup.py
									
									
									
									
									
								
							
							
						
						
									
										2
									
								
								setup.py
									
									
									
									
									
								
							|  | @ -25,7 +25,7 @@ setup( | ||||||
|     install_requires=["twisted[tls]>=18.7", "autobahn>=0.18", "asgiref~=3.0"], |     install_requires=["twisted[tls]>=18.7", "autobahn>=0.18", "asgiref~=3.0"], | ||||||
|     setup_requires=["pytest-runner"], |     setup_requires=["pytest-runner"], | ||||||
|     extras_require={ |     extras_require={ | ||||||
|         "tests": ["hypothesis~=3.88", "pytest~=3.10", "pytest-asyncio~=0.8"] |         "tests": ["hypothesis~=4.23", "pytest~=3.10", "pytest-asyncio~=0.8"] | ||||||
|     }, |     }, | ||||||
|     entry_points={ |     entry_points={ | ||||||
|         "console_scripts": ["daphne = daphne.cli:CommandLineInterface.entrypoint"] |         "console_scripts": ["daphne = daphne.cli:CommandLineInterface.entrypoint"] | ||||||
|  |  | ||||||
|  | @ -17,7 +17,7 @@ def http_method(): | ||||||
| 
 | 
 | ||||||
| def _http_path_portion(): | def _http_path_portion(): | ||||||
|     alphabet = string.ascii_letters + string.digits + "-._~" |     alphabet = string.ascii_letters + string.digits + "-._~" | ||||||
|     return strategies.text(min_size=1, average_size=10, max_size=128, alphabet=alphabet) |     return strategies.text(min_size=1, max_size=128, alphabet=alphabet) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def http_path(): | def http_path(): | ||||||
|  | @ -33,7 +33,7 @@ def http_body(): | ||||||
|     """ |     """ | ||||||
|     Returns random binary body data. |     Returns random binary body data. | ||||||
|     """ |     """ | ||||||
|     return strategies.binary(min_size=0, average_size=600, max_size=1500) |     return strategies.binary(min_size=0, max_size=1500) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def valid_bidi(value): | def valid_bidi(value): | ||||||
|  | @ -52,24 +52,22 @@ def valid_bidi(value): | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def _domain_label(): | def _domain_label(): | ||||||
|     return strategies.text( |     return strategies.text(alphabet=letters, min_size=1, max_size=63).filter(valid_bidi) | ||||||
|         alphabet=letters, min_size=1, average_size=6, max_size=63 |  | ||||||
|     ).filter(valid_bidi) |  | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def international_domain_name(): | def international_domain_name(): | ||||||
|     """ |     """ | ||||||
|     Returns a byte string of a domain name, IDNA-encoded. |     Returns a byte string of a domain name, IDNA-encoded. | ||||||
|     """ |     """ | ||||||
|     return strategies.lists(_domain_label(), min_size=2, average_size=2).map( |     return strategies.lists(_domain_label(), min_size=2).map( | ||||||
|         lambda s: (".".join(s)).encode("idna") |         lambda s: (".".join(s)).encode("idna") | ||||||
|     ) |     ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def _query_param(): | def _query_param(): | ||||||
|     return strategies.text( |     return strategies.text(alphabet=letters, min_size=1, max_size=255).map( | ||||||
|         alphabet=letters, min_size=1, average_size=10, max_size=255 |         lambda s: s.encode("utf8") | ||||||
|     ).map(lambda s: s.encode("utf8")) |     ) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
| def query_params(): | def query_params(): | ||||||
|  | @ -79,7 +77,7 @@ def query_params(): | ||||||
|     ensures that the total urlencoded query string is not longer than 1500 characters. |     ensures that the total urlencoded query string is not longer than 1500 characters. | ||||||
|     """ |     """ | ||||||
|     return strategies.lists( |     return strategies.lists( | ||||||
|         strategies.tuples(_query_param(), _query_param()), min_size=0, average_size=5 |         strategies.tuples(_query_param(), _query_param()), min_size=0 | ||||||
|     ).filter(lambda x: len(parse.urlencode(x)) < 1500) |     ).filter(lambda x: len(parse.urlencode(x)) < 1500) | ||||||
| 
 | 
 | ||||||
| 
 | 
 | ||||||
|  | @ -109,7 +107,6 @@ def header_value(): | ||||||
|             + string.punctuation.replace(",", "") |             + string.punctuation.replace(",", "") | ||||||
|             + " /t", |             + " /t", | ||||||
|             min_size=1, |             min_size=1, | ||||||
|             average_size=40, |  | ||||||
|             max_size=8190, |             max_size=8190, | ||||||
|         ) |         ) | ||||||
|         .map(lambda s: s.encode("utf-8")) |         .map(lambda s: s.encode("utf-8")) | ||||||
|  | @ -125,8 +122,5 @@ def headers(): | ||||||
|     https://en.wikipedia.org/wiki/List_of_HTTP_header_fields |     https://en.wikipedia.org/wiki/List_of_HTTP_header_fields | ||||||
|     """ |     """ | ||||||
|     return strategies.lists( |     return strategies.lists( | ||||||
|         strategies.tuples(header_name(), header_value()), |         strategies.tuples(header_name(), header_value()), min_size=0, max_size=100 | ||||||
|         min_size=0, |  | ||||||
|         average_size=10, |  | ||||||
|         max_size=100, |  | ||||||
|     ) |     ) | ||||||
|  |  | ||||||
		Loading…
	
		Reference in New Issue
	
	Block a user