Activate Ruff C4 Rules #5496

Open · wants to merge 2 commits into base: develop
2 changes: 1 addition & 1 deletion backend/infrahub/api/query.py
@@ -111,7 +111,7 @@ async def execute_query(
branch=branch_params.branch.name,
query_id=gql_query.id,
query_name=gql_query.name.value,
-related_node_ids=sorted(list(related_node_ids)),
+related_node_ids=sorted(related_node_ids),
subscribers=sorted(subscribers),
params=params,
)
6 changes: 3 additions & 3 deletions backend/infrahub/core/models.py
@@ -152,7 +152,7 @@ def routing_key(self) -> str:
return "schema.validator.path"

def __hash__(self) -> int:
-return hash((type(self),) + tuple([self.constraint_name + self.path.get_path()]))
+return hash((type(self),) + tuple(self.constraint_name + self.path.get_path()))


class SchemaUpdateValidationResult(BaseModel):
@@ -494,8 +494,8 @@ def update_list_hashable_model(
raise ValueError(f"Unable to merge the list for {field_name}, some items have the same _sorting_id")

shared_ids = intersection(list(local_sub_items.keys()), list(other_sub_items.keys()))
-local_only_ids = set(list(local_sub_items.keys())) - set(shared_ids)
-other_only_ids = set(list(other_sub_items.keys())) - set(shared_ids)
+local_only_ids = set(local_sub_items.keys()) - set(shared_ids)
+other_only_ids = set(other_sub_items.keys()) - set(shared_ids)

new_list = [value for key, value in local_sub_items.items() if key in local_only_ids]
new_list.extend(
2 changes: 1 addition & 1 deletion backend/infrahub/core/relationship/model.py
@@ -1032,7 +1032,7 @@ async def update(  # pylint: disable=too-many-branches
changed = True

# Check if some relationship got removed by checking if the previous list of relationship is a subset of the current list of not
-if set(list(previous_relationships.keys())) <= {rel.peer_id for rel in await self.get_relationships(db=db)}:
+if set(previous_relationships.keys()) <= {rel.peer_id for rel in await self.get_relationships(db=db)}:
changed = True

if changed:
4 changes: 2 additions & 2 deletions backend/infrahub/core/schema/schema_branch.py
@@ -1633,7 +1633,7 @@ def manage_profile_schemas(self) -> None:

if new_used_by_profile:
core_profile_schema = self.get(name=InfrahubKind.PROFILE, duplicate=True)
-core_profile_schema.used_by = sorted(list(profile_schema_kinds))
+core_profile_schema.used_by = sorted(profile_schema_kinds)
self.set(name=InfrahubKind.PROFILE, schema=core_profile_schema)

if self.has(name=InfrahubKind.NODE):
@@ -1644,7 +1644,7 @@ def manage_profile_schemas(self) -> None:
if new_used_by_node:
core_node_schema = self.get(name=InfrahubKind.NODE, duplicate=True)
updated_used_by_node = set(chain(profile_schema_kinds, set(core_node_schema.used_by)))
-core_node_schema.used_by = sorted(list(updated_used_by_node))
+core_node_schema.used_by = sorted(updated_used_by_node)
self.set(name=InfrahubKind.NODE, schema=core_node_schema)

def manage_profile_relationships(self) -> None:
2 changes: 1 addition & 1 deletion backend/infrahub/git/base.py
@@ -666,7 +666,7 @@ async def compare_local_remote(self) -> tuple[list[str], list[str]]:
log.info("New commit detected", repository=self.name, branch=branch_name)
updated_branches.append(branch_name)

-return sorted(list(new_branches)), sorted(updated_branches)
+return sorted(new_branches), sorted(updated_branches)

async def validate_remote_branch(self, branch_name: str) -> bool:
"""Validate a branch present on the remote repository.
2 changes: 1 addition & 1 deletion backend/infrahub/graphql/mutations/graphql_query.py
@@ -47,7 +47,7 @@ async def extract_query_info(
if not valid:
raise ValueError(f"Query is not valid, {str(errors)}")

query_info["models"] = {"value": sorted(list(await analyzer.get_models_in_use(types=info.context.types)))}
query_info["models"] = {"value": sorted(await analyzer.get_models_in_use(types=info.context.types))}
query_info["depth"] = {"value": await analyzer.calculate_depth()}
query_info["height"] = {"value": await analyzer.calculate_height()}
query_info["operations"] = {
2 changes: 1 addition & 1 deletion backend/infrahub/graphql/parser.py
@@ -162,7 +162,7 @@ def apply_directives(self, selection_set: SelectionSetNode, fields: dict, path:
FieldNode(
kind="field",
name=NameNode(kind="name", value=sub_node.key),
-selection_set=SelectionSetNode(selections=tuple([sub_node.node])),
+selection_set=SelectionSetNode(selections=(sub_node.node,)),
)
)
selection_set.selections = tuple(selections)
2 changes: 1 addition & 1 deletion backend/tests/helpers/file_repo.py
@@ -40,7 +40,7 @@ def _initial_directory(self, repo_base: Path) -> str:
return initial_directory

def _apply_pull_requests(self, repo_base: Path) -> None:
-pull_requests = sorted(list(repo_base.glob("pr*")))
+pull_requests = sorted(repo_base.glob("pr*"))
for pull_request in pull_requests:
branch = str(pull_request).split("__")[-1]
if branch in self._branches:
6 changes: 3 additions & 3 deletions backend/tests/unit/api/test_40_schema.py
@@ -32,8 +32,8 @@ async def test_schema_read_endpoint_default_branch(
core_nodes = [node for node in core_models["nodes"] if node["namespace"] != "Internal"]
core_generics = [node for node in core_models["generics"] if node["namespace"] != "Internal"]

-expected_nodes = set([dict(item).get("name") for item in core_nodes + car_person_schema_generics.nodes])
-expected_generics = set([dict(item).get("name") for item in core_generics + car_person_schema_generics.generics])
+expected_nodes = {dict(item).get("name") for item in core_nodes + car_person_schema_generics.nodes}
+expected_generics = {dict(item).get("name") for item in core_generics + car_person_schema_generics.generics}

assert "nodes" in schema
assert "generics" in schema
@@ -68,7 +68,7 @@ async def test_schema_read_endpoint_branch1(

core_nodes = [node for node in core_models["nodes"] if node["namespace"] != "Internal"]

-expected_nodes = set([dict(node).get("name") for node in core_nodes + car_person_schema_generics.nodes])
+expected_nodes = {dict(node).get("name") for node in core_nodes + car_person_schema_generics.nodes}
assert "nodes" in schema
assert len(schema["nodes"]) == len(expected_nodes)

4 changes: 2 additions & 2 deletions backend/tests/unit/core/diff/query/test_read.py
@@ -258,8 +258,8 @@ async def test_get_without_parent(self, db: InfrahubDatabase, default_branch: Br
include_parents=True,
)

-assert set([node.label for node in diffs_without[0].nodes]) == {"paris-r1", "paris rack2", "THING1"}
-assert set([node.label for node in diffs_with[0].nodes]) == {
+assert {node.label for node in diffs_without[0].nodes} == {"paris-r1", "paris rack2", "THING1"}
+assert {node.label for node in diffs_with[0].nodes} == {
"paris",
"THING1",
"paris-r1",
16 changes: 8 additions & 8 deletions backend/tests/unit/core/test_branch.py
@@ -229,8 +229,8 @@ async def test_get_branches_and_times_for_range_branch1(db: InfrahubDatabase, ba
branch1 = await registry.get_branch(branch="branch1", db=db)

start_times, end_times = branch1.get_branches_and_times_for_range(start_time=Timestamp("1h"), end_time=now)
-assert sorted(list(start_times.keys())) == ["branch1", "main"]
-assert sorted(list(end_times.keys())) == ["branch1", "main"]
+assert sorted(start_times.keys()) == ["branch1", "main"]
+assert sorted(end_times.keys()) == ["branch1", "main"]
assert end_times["branch1"] == now.to_string()
assert end_times["main"] == now.to_string()
assert start_times["branch1"] == base_dataset_02["time_m45"]
@@ -239,8 +239,8 @@ async def test_get_branches_and_times_for_range_branch1(db: InfrahubDatabase, ba
t1 = Timestamp("2s")
t10 = Timestamp("10s")
start_times, end_times = branch1.get_branches_and_times_for_range(start_time=t10, end_time=t1)
-assert sorted(list(start_times.keys())) == ["branch1", "main"]
-assert sorted(list(end_times.keys())) == ["branch1", "main"]
+assert sorted(start_times.keys()) == ["branch1", "main"]
+assert sorted(end_times.keys()) == ["branch1", "main"]
assert end_times["branch1"] == t1.to_string()
assert end_times["main"] == t1.to_string()
assert start_times["branch1"] == t10.to_string()
@@ -252,8 +252,8 @@ async def test_get_branches_and_times_for_range_branch2(db: InfrahubDatabase, ba
branch2 = await registry.get_branch(branch="branch2", db=db)

start_times, end_times = branch2.get_branches_and_times_for_range(start_time=Timestamp("1h"), end_time=now)
-assert sorted(list(start_times.keys())) == ["branch2", "main"]
-assert sorted(list(end_times.keys())) == ["branch2", "main"]
+assert sorted(start_times.keys()) == ["branch2", "main"]
+assert sorted(end_times.keys()) == ["branch2", "main"]
assert end_times["branch2"] == now.to_string()
assert end_times["main"] == now.to_string()
assert start_times["branch2"] == base_dataset_03["time_m90"]
@@ -262,8 +262,8 @@ async def test_get_branches_and_times_for_range_branch2(db: InfrahubDatabase, ba
t1 = Timestamp("2s")
t10 = Timestamp("10s")
start_times, end_times = branch2.get_branches_and_times_for_range(start_time=t10, end_time=t1)
-assert sorted(list(start_times.keys())) == ["branch2", "main"]
-assert sorted(list(end_times.keys())) == ["branch2", "main"]
+assert sorted(start_times.keys()) == ["branch2", "main"]
+assert sorted(end_times.keys()) == ["branch2", "main"]
assert end_times["branch2"] == t1.to_string()
assert end_times["main"] == t1.to_string()
assert start_times["branch2"] == t10.to_string()
2 changes: 1 addition & 1 deletion backend/tests/unit/core/test_branch_diff.py
@@ -1064,7 +1064,7 @@ async def test_diff_schema_changes(
diff = BranchDiffer(db=db, branch=branch2)
summary = await diff.get_schema_summary()
assert list(summary.keys()) == ["branch2", "main"]
-assert set([element.kind for elements in summary.values() for element in elements]) == {
+assert {element.kind for elements in summary.values() for element in elements} == {
"SchemaNode",
"SchemaAttribute",
"SchemaRelationship",
2 changes: 1 addition & 1 deletion backend/tests/unit/core/test_node_query.py
@@ -421,7 +421,7 @@ async def test_query_NodeGetHierarchyQuery_ancestors(
branch=default_branch,
)
await query.execute(db=db)
-assert sorted(list(query.get_peer_ids())) == sorted([paris.id, europe.id])
+assert sorted(query.get_peer_ids()) == sorted([paris.id, europe.id])


async def test_query_NodeGetHierarchyQuery_filters(
2 changes: 1 addition & 1 deletion backend/tests/unit/core/test_query_branch.py
@@ -12,5 +12,5 @@ async def test_GetAllBranchInternalRelationshipQuery(db: InfrahubDatabase, defau

assert len(query.results)

-unique_ids = set([result.get("r").element_id for result in query.results])
+unique_ids = {result.get("r").element_id for result in query.results}
assert len(unique_ids) == len(query.results)
4 changes: 2 additions & 2 deletions backend/tests/unit/git/test_git_repository.py
@@ -239,15 +239,15 @@ async def test_get_branches_from_local(git_repo_01: InfrahubRepository):

local_branches = repo.get_branches_from_local()
assert isinstance(local_branches, dict)
-assert sorted(list(local_branches.keys())) == ["main"]
+assert sorted(local_branches.keys()) == ["main"]


async def test_get_branches_from_remote(git_repo_01: InfrahubRepository):
repo = git_repo_01

remote_branches = repo.get_branches_from_remote()
assert isinstance(remote_branches, dict)
-assert sorted(list(remote_branches.keys())) == ["branch01", "branch02", "clean-branch", "main"]
+assert sorted(remote_branches.keys()) == ["branch01", "branch02", "clean-branch", "main"]


async def test_get_branches_from_graph(
2 changes: 1 addition & 1 deletion backend/tests/unit/graphql/test_diff_tree_query.py
@@ -941,4 +941,4 @@ async def test_diff_get_filters(
)

assert result.errors is None
assert set([node["label"] for node in result.data["DiffTree"]["nodes"]]) == set(labels)
assert {node["label"] for node in result.data["DiffTree"]["nodes"]} == set(labels)
2 changes: 1 addition & 1 deletion backend/tests/unit/graphql/test_graphql_query.py
@@ -2365,7 +2365,7 @@ async def test_query_relationship_node_property(
assert result.errors is None

results = {item["node"]["name"]["value"]: item["node"] for item in result.data["TestPerson"]["edges"]}
-assert sorted(list(results.keys())) == ["Jane", "John"]
+assert sorted(results.keys()) == ["Jane", "John"]
assert len(results["John"]["cars"]["edges"]) == 1
assert len(results["Jane"]["cars"]["edges"]) == 1

26 changes: 13 additions & 13 deletions backend/tests/unit/graphql/test_manager.py
@@ -23,7 +23,7 @@ async def test_generate_interface_object(db: InfrahubDatabase, default_branch: B
assert inspect.isclass(result)
assert issubclass(result, graphene.Interface)
assert result._meta.name == "TestVehicule"
-assert sorted(list(result._meta.fields.keys())) == ["description", "display_label", "hfid", "id", "name"]
+assert sorted(result._meta.fields.keys()) == ["description", "display_label", "hfid", "id", "name"]


async def test_generate_graphql_object(db: InfrahubDatabase, default_branch: Branch, criticality_schema):
@@ -36,7 +36,7 @@ async def test_generate_graphql_object(db: InfrahubDatabase, default_branch: Bra
assert inspect.isclass(result)
assert issubclass(result, InfrahubObject)
assert result._meta.name == "TestCriticality"
-assert sorted(list(result._meta.fields.keys())) == [
+assert sorted(result._meta.fields.keys()) == [
"_updated_at",
"color",
"description",
@@ -67,7 +67,7 @@ async def test_generate_graphql_object_with_interface(
assert inspect.isclass(result)
assert issubclass(result, InfrahubObject)
assert result._meta.name == "TestCar"
-assert sorted(list(result._meta.fields.keys())) == [
+assert sorted(result._meta.fields.keys()) == [
"_updated_at",
"description",
"display_label",
@@ -87,7 +87,7 @@ async def test_generate_graphql_mutation_create(db: InfrahubDatabase, default_br
input_type = gqlm.generate_graphql_mutation_create_input(schema=criticality_schema)
result = gqlm.generate_graphql_mutation_create(schema=criticality_schema, input_type=input_type)
assert result._meta.name == "TestCriticalityCreate"
-assert sorted(list(result._meta.fields.keys())) == ["object", "ok"]
+assert sorted(result._meta.fields.keys()) == ["object", "ok"]


async def test_generate_graphql_mutation_update(db: InfrahubDatabase, default_branch: Branch, criticality_schema):
@@ -99,7 +99,7 @@ async def test_generate_graphql_mutation_update(db: InfrahubDatabase, default_br
input_type = gqlm.generate_graphql_mutation_update_input(schema=criticality_schema)
result = gqlm.generate_graphql_mutation_update(schema=criticality_schema, input_type=input_type)
assert result._meta.name == "TestCriticalityUpdate"
-assert sorted(list(result._meta.fields.keys())) == ["object", "ok"]
+assert sorted(result._meta.fields.keys()) == ["object", "ok"]


async def test_generate_object_types(db: InfrahubDatabase, default_branch: Branch, data_schema, car_person_schema):
@@ -124,7 +124,7 @@ async def test_generate_object_types(db: InfrahubDatabase, default_branch: Branc
assert issubclass(nested_edged_person, InfrahubObject)
assert issubclass(relationship_property, graphene.ObjectType)

-assert sorted(list(car._meta.fields.keys())) == [
+assert sorted(car._meta.fields.keys()) == [
"_updated_at",
"color",
"display_label",
@@ -140,13 +140,13 @@ async def test_generate_object_types(db: InfrahubDatabase, default_branch: Branc
"transmission",
]

-assert sorted(list(edged_car._meta.fields.keys())) == ["node"]
+assert sorted(edged_car._meta.fields.keys()) == ["node"]
assert str(edged_car._meta.fields["node"].type) == "TestCar"
-assert sorted(list(nested_edged_car._meta.fields.keys())) == ["node", "properties"]
+assert sorted(nested_edged_car._meta.fields.keys()) == ["node", "properties"]
assert str(nested_edged_car._meta.fields["node"].type) == "TestCar"
assert str(nested_edged_car._meta.fields["properties"].type) == "RelationshipProperty"

-assert sorted(list(person._meta.fields.keys())) == [
+assert sorted(person._meta.fields.keys()) == [
"_updated_at",
"cars",
"display_label",
Expand All @@ -158,12 +158,12 @@ async def test_generate_object_types(db: InfrahubDatabase, default_branch: Branc
"profiles",
"subscriber_of_groups",
]
-assert sorted(list(edged_person._meta.fields.keys())) == ["node"]
+assert sorted(edged_person._meta.fields.keys()) == ["node"]
assert str(edged_person._meta.fields["node"].type) == "TestPerson"
-assert sorted(list(nested_edged_person._meta.fields.keys())) == ["node", "properties"]
+assert sorted(nested_edged_person._meta.fields.keys()) == ["node", "properties"]
assert str(nested_edged_person._meta.fields["node"].type) == "TestPerson"
assert str(nested_edged_person._meta.fields["properties"].type) == "RelationshipProperty"
-assert sorted(list(relationship_property._meta.fields.keys())) == [
+assert sorted(relationship_property._meta.fields.keys()) == [
"is_protected",
"is_visible",
"owner",
@@ -260,7 +260,7 @@ async def test_generate_filters(db: InfrahubDatabase, default_branch: Branch, da
"subscriber_of_groups__name__value",
"subscriber_of_groups__name__values",
]
-assert sorted(list(filters.keys())) == sorted(expected_filters)
+assert sorted(filters.keys()) == sorted(expected_filters)


@pytest.mark.parametrize(
1 change: 1 addition & 0 deletions changelog/2194.changed.md
@@ -0,0 +1 @@
+Activate ruff C4 rule.
4 changes: 0 additions & 4 deletions pyproject.toml
@@ -465,10 +465,6 @@ ignore = [
"B009", # [*] Do not call `getattr` with a constant attribute value. It is not any safer than normal property access.
"B010", # [*] Do not call `setattr` with a constant attribute value. It is not any safer than normal property access.
"B904", # Within an `except` clause, raise exceptions with `raise ... from err` or `raise ... from None` to distinguish them from errors in exception handling
"C403", # Unnecessary `list` comprehension (rewrite as a `set` comprehension)
"C409", # Unnecessary `list` literal passed to `tuple()` (rewrite as a `tuple` literal)
"C414", # Unnecessary `list` call within `sorted()`
"C420", # Unnecessary dict comprehension for iterable; use `dict.fromkeys` instead
"FURB113", # Use `networks.extend(...)` instead of repeatedly calling `networks.append()`
"FURB118", # Use `operator.itemgetter(1)` instead of defining a lambda
"FURB140", # Use `itertools.starmap` instead of the generator