Add C4 flake8-comprehensions

Kien Dang 2023-08-06 01:45:54 +08:00
parent c4d9bd374e
commit b7e95b218a
5 changed files with 27 additions and 26 deletions
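
For context, a minimal sketch of the two rewrites this commit applies (the variable names are illustrative only, and C408/C400 are the flake8-comprehensions codes that, to my understanding, cover these patterns):

# Illustrative sketch, not part of this commit: the kind of rewrites
# the flake8-comprehensions (C4) rules enforce, mirroring the diffs below.

# C408: unnecessary dict() call with keyword arguments -- use a dict literal.
meta = dict(model="Reporter", fields=["first_name"])    # flagged by C408
meta = {"model": "Reporter", "fields": ["first_name"]}  # preferred form

# C400: unnecessary generator passed to list() -- use a list comprehension.
squares = list(i * i for i in range(6))  # flagged by C400
squares = [i * i for i in range(6)]      # preferred form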

View File

@@ -4,6 +4,7 @@ select = [
     "F", # pyflake
     "I", # isort
     "B", # flake8-bugbear
+    "C4", # flake8-comprehensions
     "UP", # pyupgrade
 ]

View File

@@ -58,7 +58,7 @@ class DjangoFilterConnectionField(DjangoConnectionField):
     def filterset_class(self):
         if not self._filterset_class:
             fields = self._fields or self.node_type._meta.filter_fields
-            meta = dict(model=self.model, fields=fields)
+            meta = {"model": self.model, "fields": fields}
             if self._extra_filter_meta:
                 meta.update(self._extra_filter_meta)

View File

@@ -1444,19 +1444,19 @@ def test_connection_should_limit_after_to_list_length():
     """
     after = base64.b64encode(b"arrayconnection:10").decode()
-    result = schema.execute(query, variable_values=dict(after=after))
+    result = schema.execute(query, variable_values={"after": after})
     expected = {"allReporters": {"edges": []}}
     assert not result.errors
     assert result.data == expected
 REPORTERS = [
-    dict(
-        first_name=f"First {i}",
-        last_name=f"Last {i}",
-        email=f"johndoe+{i}@example.com",
-        a_choice=1,
-    )
+    {
+        "first_name": f"First {i}",
+        "last_name": f"Last {i}",
+        "email": f"johndoe+{i}@example.com",
+        "a_choice": 1,
+    }
     for i in range(6)
 ]
@@ -1531,7 +1531,7 @@ def test_should_have_next_page(graphene_settings):
     assert result.data["allReporters"]["pageInfo"]["hasNextPage"]
     last_result = result.data["allReporters"]["pageInfo"]["endCursor"]
-    result2 = schema.execute(query, variable_values=dict(first=4, after=last_result))
+    result2 = schema.execute(query, variable_values={"first": 4, "after": last_result})
     assert not result2.errors
     assert len(result2.data["allReporters"]["edges"]) == 2
     assert not result2.data["allReporters"]["pageInfo"]["hasNextPage"]
@@ -1622,7 +1622,7 @@ class TestBackwardPagination:
         after = base64.b64encode(b"arrayconnection:0").decode()
         result = schema.execute(
             query_first_last_and_after,
-            variable_values=dict(after=after),
+            variable_values={"after": after},
         )
         assert not result.errors
         assert len(result.data["allReporters"]["edges"]) == 3
@@ -1654,7 +1654,7 @@ class TestBackwardPagination:
         before = base64.b64encode(b"arrayconnection:5").decode()
         result = schema.execute(
             query_first_last_and_after,
-            variable_values=dict(before=before),
+            variable_values={"before": before},
         )
         assert not result.errors
         assert len(result.data["allReporters"]["edges"]) == 1
@@ -1877,7 +1877,7 @@ def test_connection_should_forbid_offset_filtering_with_before():
         }
     """
     before = base64.b64encode(b"arrayconnection:2").decode()
-    result = schema.execute(query, variable_values=dict(before=before))
+    result = schema.execute(query, variable_values={"before": before})
     expected_error = "You can't provide a `before` value at the same time as an `offset` value to properly paginate the `allReporters` connection."
     assert len(result.errors) == 1
     assert result.errors[0].message == expected_error
@@ -1913,7 +1913,7 @@ def test_connection_should_allow_offset_filtering_with_after():
     """
     after = base64.b64encode(b"arrayconnection:0").decode()
-    result = schema.execute(query, variable_values=dict(after=after))
+    result = schema.execute(query, variable_values={"after": after})
     assert not result.errors
     expected = {
         "allReporters": {
@@ -1949,7 +1949,7 @@ def test_connection_should_succeed_if_last_higher_than_number_of_objects():
         }
     """
-    result = schema.execute(query, variable_values=dict(last=2))
+    result = schema.execute(query, variable_values={"last": 2})
     assert not result.errors
     expected = {"allReporters": {"edges": []}}
     assert result.data == expected
@@ -1959,7 +1959,7 @@ def test_connection_should_succeed_if_last_higher_than_number_of_objects():
     Reporter.objects.create(first_name="Jane", last_name="Roe")
     Reporter.objects.create(first_name="Some", last_name="Lady")
-    result = schema.execute(query, variable_values=dict(last=2))
+    result = schema.execute(query, variable_values={"last": 2})
     assert not result.errors
     expected = {
         "allReporters": {
@@ -1971,7 +1971,7 @@ def test_connection_should_succeed_if_last_higher_than_number_of_objects():
     }
     assert result.data == expected
-    result = schema.execute(query, variable_values=dict(last=4))
+    result = schema.execute(query, variable_values={"last": 4})
     assert not result.errors
     expected = {
         "allReporters": {
@@ -1985,7 +1985,7 @@ def test_connection_should_succeed_if_last_higher_than_number_of_objects():
     }
     assert result.data == expected
-    result = schema.execute(query, variable_values=dict(last=20))
+    result = schema.execute(query, variable_values={"last": 20})
     assert not result.errors
     expected = {
         "allReporters": {

View File

@@ -229,7 +229,7 @@ def test_allows_sending_a_mutation_via_post(client):
 def test_allows_post_with_url_encoding(client):
     response = client.post(
         url_string(),
-        urlencode(dict(query="{test}")),
+        urlencode({"query": "{test}"}),
         "application/x-www-form-urlencoded",
     )
@@ -303,10 +303,10 @@ def test_supports_post_url_encoded_query_with_string_variables(client):
     response = client.post(
         url_string(),
         urlencode(
-            dict(
-                query="query helloWho($who: String){ test(who: $who) }",
-                variables=json.dumps({"who": "Dolly"}),
-            )
+            {
+                "query": "query helloWho($who: String){ test(who: $who) }",
+                "variables": json.dumps({"who": "Dolly"}),
+            }
         ),
         "application/x-www-form-urlencoded",
     )
@@ -329,7 +329,7 @@ def test_supports_post_json_quey_with_get_variable_values(client):
 def test_post_url_encoded_query_with_get_variable_values(client):
     response = client.post(
         url_string(variables=json.dumps({"who": "Dolly"})),
-        urlencode(dict(query="query helloWho($who: String){ test(who: $who) }")),
+        urlencode({"query": "query helloWho($who: String){ test(who: $who) }"}),
         "application/x-www-form-urlencoded",
     )
@@ -511,7 +511,7 @@ def test_handles_django_request_error(client, monkeypatch):
     monkeypatch.setattr("django.http.request.HttpRequest.read", mocked_read)
-    valid_json = json.dumps(dict(foo="bar"))
+    valid_json = json.dumps({"foo": "bar"})
     response = client.post(url_string(), valid_json, "application/json")
     assert response.status_code == 400

View File

@@ -39,9 +39,9 @@ def get_accepted_content_types(request):
     raw_content_types = request.META.get("HTTP_ACCEPT", "*/*").split(",")
     qualified_content_types = map(qualify, raw_content_types)
-    return list(
+    return [
         x[0] for x in sorted(qualified_content_types, key=lambda x: x[1], reverse=True)
-    )
+    ]
 def instantiate_middleware(middlewares):