diff --git a/.github/workflows/black.yml b/.github/workflows/black.yml
index 36cb0a43..4c43d629 100644
--- a/.github/workflows/black.yml
+++ b/.github/workflows/black.yml
@@ -27,7 +27,7 @@ jobs:
         python-version: "3.11.6"
 
     - name: Install Black
-      run: python -m pip install black==22.3.0
+      run: python -m pip install black==24.3.0
 
     - name: Run Black Check
       run: black . --check
\ No newline at end of file
diff --git a/backend/analytics_server/mhq/api/deployment_analytics.py b/backend/analytics_server/mhq/api/deployment_analytics.py
index 8f482173..dd1ad3e2 100644
--- a/backend/analytics_server/mhq/api/deployment_analytics.py
+++ b/backend/analytics_server/mhq/api/deployment_analytics.py
@@ -127,9 +127,9 @@ def get_prs_included_in_deployment(deployment_id: str):
     repo: OrgRepo = pr_analytics_service.get_repo_by_id(deployment.repo_id)
 
-    prs: List[
-        PullRequest
-    ] = deployments_service.get_pull_requests_related_to_deployment(deployment)
+    prs: List[PullRequest] = (
+        deployments_service.get_pull_requests_related_to_deployment(deployment)
+    )
 
     repo_id_map = {repo.id: repo}
 
     return get_non_paginated_pr_response(
@@ -204,10 +204,10 @@ def get_team_deployment_frequency_trends(
 
     deployments_analytics_service = get_deployment_analytics_service()
 
-    week_to_deployments_count_map: Dict[
-        datetime, int
-    ] = deployments_analytics_service.get_weekly_deployment_frequency_trends(
-        team_id, interval, pr_filter, workflow_filter
+    week_to_deployments_count_map: Dict[datetime, int] = (
+        deployments_analytics_service.get_weekly_deployment_frequency_trends(
+            team_id, interval, pr_filter, workflow_filter
+        )
     )
 
     return {
diff --git a/backend/analytics_server/mhq/api/incidents.py b/backend/analytics_server/mhq/api/incidents.py
index ea880fba..2821dc48 100644
--- a/backend/analytics_server/mhq/api/incidents.py
+++ b/backend/analytics_server/mhq/api/incidents.py
@@ -92,9 +92,9 @@ def get_deployments_with_related_incidents(
 
     incidents: List[Incident] = incident_service.get_team_incidents(team_id, interval)
 
-    deployment_incidents_map: Dict[
-        Deployment, List[Incident]
-    ] = incident_service.get_deployment_incidents_map(deployments, incidents)
+    deployment_incidents_map: Dict[Deployment, List[Incident]] = (
+        incident_service.get_deployment_incidents_map(deployments, incidents)
+    )
 
     return list(
         map(
@@ -238,10 +238,10 @@ def get_team_cfr_trends(
 
     incidents: List[Incident] = incident_service.get_team_incidents(team_id, interval)
 
-    team_weekly_change_failure_rate: Dict[
-        datetime, ChangeFailureRateMetrics
-    ] = incident_service.get_weekly_change_failure_rate(
-        interval, deployments, incidents
+    team_weekly_change_failure_rate: Dict[datetime, ChangeFailureRateMetrics] = (
+        incident_service.get_weekly_change_failure_rate(
+            interval, deployments, incidents
+        )
     )
 
     return {
diff --git a/backend/analytics_server/mhq/api/pull_requests.py b/backend/analytics_server/mhq/api/pull_requests.py
index a8b2e277..30238746 100644
--- a/backend/analytics_server/mhq/api/pull_requests.py
+++ b/backend/analytics_server/mhq/api/pull_requests.py
@@ -159,9 +159,9 @@ def get_team_lead_time_trends(
 
     lead_time_service = get_lead_time_service()
 
-    weekly_lead_time_metrics_avg_map: Dict[
-        datetime, LeadTimeMetrics
-    ] = lead_time_service.get_team_lead_time_metrics_trends(team, interval, pr_filter)
+    weekly_lead_time_metrics_avg_map: Dict[datetime, LeadTimeMetrics] = (
+        lead_time_service.get_team_lead_time_metrics_trends(team, interval, pr_filter)
+    )
 
     return {
         week.isoformat(): adapt_lead_time_metrics(average_lead_time_metrics)
diff --git a/backend/analytics_server/mhq/api/resources/incident_resources.py b/backend/analytics_server/mhq/api/resources/incident_resources.py
index 72e1cccf..6e300130 100644
--- a/backend/analytics_server/mhq/api/resources/incident_resources.py
+++ b/backend/analytics_server/mhq/api/resources/incident_resources.py
@@ -22,12 +22,14 @@ def adapt_incident(
         "provider": incident.provider,
         "status": incident.status,
         "creation_date": incident.creation_date.isoformat(),
-        "resolved_date": incident.resolved_date.isoformat()
-        if incident.resolved_date
-        else None,
-        "acknowledged_date": incident.acknowledged_date.isoformat()
-        if incident.acknowledged_date
-        else None,
+        "resolved_date": (
+            incident.resolved_date.isoformat() if incident.resolved_date else None
+        ),
+        "acknowledged_date": (
+            incident.acknowledged_date.isoformat()
+            if incident.acknowledged_date
+            else None
+        ),
         "assigned_to": adapt_user_info(incident.assigned_to, username_user_map),
         "assignees": list(
             map(
diff --git a/backend/analytics_server/mhq/exapi/git_incidents.py b/backend/analytics_server/mhq/exapi/git_incidents.py
index 6ecad49b..950eb6a7 100644
--- a/backend/analytics_server/mhq/exapi/git_incidents.py
+++ b/backend/analytics_server/mhq/exapi/git_incidents.py
@@ -40,10 +40,10 @@ def get_org_repo(self, repo_id: str):
     def get_repo_revert_prs_in_interval(
         self, repo_id: str, from_time: datetime, to_time: datetime
     ) -> List[RevertPRMap]:
-        revert_pr_mappings: List[
-            PullRequestRevertPRMapping
-        ] = self.code_repo_service.get_repo_revert_prs_mappings_updated_in_interval(
-            repo_id, from_time, to_time
+        revert_pr_mappings: List[PullRequestRevertPRMapping] = (
+            self.code_repo_service.get_repo_revert_prs_mappings_updated_in_interval(
+                repo_id, from_time, to_time
+            )
         )
 
         revert_pr_ids = [str(pr.pr_id) for pr in revert_pr_mappings]
diff --git a/backend/analytics_server/mhq/service/code/integration.py b/backend/analytics_server/mhq/service/code/integration.py
index 77ce8f00..24ff1d20 100644
--- a/backend/analytics_server/mhq/service/code/integration.py
+++ b/backend/analytics_server/mhq/service/code/integration.py
@@ -13,10 +13,10 @@ def __init__(self, core_repo_service: CoreRepoService):
         self.core_repo_service = core_repo_service
 
     def get_org_providers(self, org_id: str) -> List[str]:
-        integrations: List[
-            Integration
-        ] = self.core_repo_service.get_org_integrations_for_names(
-            org_id, CODE_INTEGRATION_BUCKET
+        integrations: List[Integration] = (
+            self.core_repo_service.get_org_integrations_for_names(
+                org_id, CODE_INTEGRATION_BUCKET
+            )
         )
         if not integrations:
             return []
diff --git a/backend/analytics_server/mhq/service/code/lead_time.py b/backend/analytics_server/mhq/service/code/lead_time.py
index 598168d5..3122bf78 100644
--- a/backend/analytics_server/mhq/service/code/lead_time.py
+++ b/backend/analytics_server/mhq/service/code/lead_time.py
@@ -54,15 +54,15 @@ def get_team_lead_time_metrics_trends(
             set(self._get_team_repos_lead_time_metrics(team_repos, interval, pr_filter))
         )
 
-        weekly_lead_time_metrics_map: Dict[
-            datetime, List[LeadTimeMetrics]
-        ] = generate_expanded_buckets(
-            lead_time_metrics, interval, "merged_at", "weekly"
+        weekly_lead_time_metrics_map: Dict[datetime, List[LeadTimeMetrics]] = (
+            generate_expanded_buckets(
+                lead_time_metrics, interval, "merged_at", "weekly"
+            )
         )
 
-        weekly_lead_time_metrics_avg_map: Dict[
-            datetime, LeadTimeMetrics
-        ] = self.get_avg_lead_time_metrics_from_map(weekly_lead_time_metrics_map)
+        weekly_lead_time_metrics_avg_map: Dict[datetime, LeadTimeMetrics] = (
+            self.get_avg_lead_time_metrics_from_map(weekly_lead_time_metrics_map)
+        )
 
         weekly_lead_time_metrics_avg_map = fill_missing_week_buckets(
             weekly_lead_time_metrics_avg_map, interval, LeadTimeMetrics
@@ -176,10 +176,10 @@ def _get_lead_time_prs_for_repos_using_workflow_deployments(
         pr_filter: PRFilter = None,
     ) -> List[PullRequest]:
 
-        team_repos_with_workflow_deployments_configured: List[
-            TeamRepos
-        ] = self._deployments_service.get_filtered_team_repos_with_workflow_configured_deployments(
-            team_repos
+        team_repos_with_workflow_deployments_configured: List[TeamRepos] = (
+            self._deployments_service.get_filtered_team_repos_with_workflow_configured_deployments(
+                team_repos
+            )
         )
 
         repo_ids = [
@@ -214,9 +214,11 @@ def _get_lead_time_prs_for_repos_using_pr_deployments(
 
     def _get_lead_time_metrics_for_pr(self, pr: PullRequest) -> LeadTimeMetrics:
         return LeadTimeMetrics(
-            first_commit_to_open=pr.first_commit_to_open
-            if pr.first_commit_to_open is not None and pr.first_commit_to_open > 0
-            else 0,
+            first_commit_to_open=(
+                pr.first_commit_to_open
+                if pr.first_commit_to_open is not None and pr.first_commit_to_open > 0
+                else 0
+            ),
             first_response_time=pr.first_response_time if pr.first_response_time else 0,
             rework_time=pr.rework_time if pr.rework_time else 0,
             merge_time=pr.merge_time if pr.merge_time else 0,
diff --git a/backend/analytics_server/mhq/service/code/repository_service.py b/backend/analytics_server/mhq/service/code/repository_service.py
index 2ecfaf62..9ba33568 100644
--- a/backend/analytics_server/mhq/service/code/repository_service.py
+++ b/backend/analytics_server/mhq/service/code/repository_service.py
@@ -194,9 +194,9 @@ def _adapt_org_incident_service(
             name=org_repo.name,
             key=str(org_repo.id),
             meta={},
-            created_at=org_incident_service.created_at
-            if org_incident_service
-            else time_now(),
+            created_at=(
+                org_incident_service.created_at if org_incident_service else time_now()
+            ),
             updated_at=time_now(),
             source_type=IncidentSource.GIT_REPO,
         )
diff --git a/backend/analytics_server/mhq/service/code/sync/etl_code_analytics.py b/backend/analytics_server/mhq/service/code/sync/etl_code_analytics.py
index 75ba0e9c..fb85dd21 100644
--- a/backend/analytics_server/mhq/service/code/sync/etl_code_analytics.py
+++ b/backend/analytics_server/mhq/service/code/sync/etl_code_analytics.py
@@ -92,9 +92,11 @@ def get_pr_performance(pr: PullRequest, pr_events: [PullRequestEvent]):
             cycle_time = cycle_time.total_seconds()
 
         return PRPerformance(
-            first_review_time=(first_review.created_at - pr.created_at).total_seconds()
-            if first_review
-            else -1,
+            first_review_time=(
+                (first_review.created_at - pr.created_at).total_seconds()
+                if first_review
+                else -1
+            ),
             rework_time=rework_time,
             merge_time=merge_time,
             cycle_time=cycle_time if pr.state == PullRequestState.MERGED else -1,
diff --git a/backend/analytics_server/mhq/service/code/sync/etl_github_handler.py b/backend/analytics_server/mhq/service/code/sync/etl_github_handler.py
index d89acd33..d5b80d2c 100644
--- a/backend/analytics_server/mhq/service/code/sync/etl_github_handler.py
+++ b/backend/analytics_server/mhq/service/code/sync/etl_github_handler.py
@@ -166,9 +166,9 @@ def process_pr(
         pr_model: Optional[PullRequest] = self.code_repo_service.get_repo_pr_by_number(
             repo_id, pr.number
         )
-        pr_event_model_list: List[
-            PullRequestEvent
-        ] = self.code_repo_service.get_pr_events(pr_model)
+        pr_event_model_list: List[PullRequestEvent] = (
+            self.code_repo_service.get_pr_events(pr_model)
+        )
         pr_commits_model_list: List = []
 
         reviews: List[GithubPullRequestReview] = list(self._api.get_pr_reviews(pr))
@@ -340,9 +340,11 @@ def _to_pr_commits(
                 url=commit["html_url"],
                 data=commit,
                 message=commit["commit"]["message"],
-                author=commit["author"]["login"]
-                if commit.get("author")
-                else commit["commit"].get("committer", {}).get("email", ""),
+                author=(
+                    commit["author"]["login"]
+                    if commit.get("author")
+                    else commit["commit"].get("committer", {}).get("email", "")
+                ),
                 created_at=self._dt_from_github_dt_string(
                     commit["commit"]["committer"]["date"]
                 ),
diff --git a/backend/analytics_server/mhq/service/code/sync/revert_prs_github_sync.py b/backend/analytics_server/mhq/service/code/sync/revert_prs_github_sync.py
index b4696b2a..17eb4aa2 100644
--- a/backend/analytics_server/mhq/service/code/sync/revert_prs_github_sync.py
+++ b/backend/analytics_server/mhq/service/code/sync/revert_prs_github_sync.py
@@ -70,10 +70,10 @@ def _get_revert_pr_mapping_for_original_prs(
         if len(pr_numbers_match_strings) == 0:
             return []
 
-        revert_prs: List[
-            PullRequest
-        ] = self.code_repo_service.get_prs_by_head_branch_match_strings(
-            list(repo_ids), pr_numbers_match_strings
+        revert_prs: List[PullRequest] = (
+            self.code_repo_service.get_prs_by_head_branch_match_strings(
+                list(repo_ids), pr_numbers_match_strings
+            )
         )
 
         revert_pr_mappings: List[PullRequestRevertPRMapping] = []
@@ -136,10 +136,10 @@ def _get_revert_pr_mapping_for_revert_prs(
         if len(revert_pr_numbers) == 0:
             return []
 
-        reverted_prs: List[
-            PullRequest
-        ] = self.code_repo_service.get_reverted_prs_by_numbers(
-            list(repo_ids), revert_pr_numbers
+        reverted_prs: List[PullRequest] = (
+            self.code_repo_service.get_reverted_prs_by_numbers(
+                list(repo_ids), revert_pr_numbers
+            )
         )
 
         revert_pr_mappings: List[PullRequestRevertPRMapping] = []
diff --git a/backend/analytics_server/mhq/service/deployments/analytics.py b/backend/analytics_server/mhq/service/deployments/analytics.py
index 8fe7c4e8..e7ecaa80 100644
--- a/backend/analytics_server/mhq/service/deployments/analytics.py
+++ b/backend/analytics_server/mhq/service/deployments/analytics.py
@@ -43,27 +43,27 @@ def get_team_all_deployments_in_interval_with_related_prs(
         related pull requests. Each deployment is associated with a list of pull
        requests that contributed to it.
         """
-        deployments: List[
-            Deployment
-        ] = self.deployments_service.get_team_all_deployments_in_interval(
-            team_id, interval, pr_filter, workflow_filter
+        deployments: List[Deployment] = (
+            self.deployments_service.get_team_all_deployments_in_interval(
+                team_id, interval, pr_filter, workflow_filter
+            )
         )
 
         team_repos: List[TeamRepos] = self._get_team_repos_by_team_id(team_id)
         repo_ids: List[str] = [str(team_repo.org_repo_id) for team_repo in team_repos]
 
-        pull_requests: List[
-            PullRequest
-        ] = self.code_repo_service.get_prs_merged_in_interval(
-            repo_ids, interval, pr_filter
+        pull_requests: List[PullRequest] = (
+            self.code_repo_service.get_prs_merged_in_interval(
+                repo_ids, interval, pr_filter
+            )
         )
 
-        repo_id_branch_to_pr_list_map: Dict[
-            Tuple[str, str], List[PullRequest]
-        ] = self._map_prs_to_repo_id_and_base_branch(pull_requests)
-        repo_id_branch_to_deployments_map: Dict[
-            Tuple[str, str], List[Deployment]
-        ] = self._map_deployments_to_repo_id_and_head_branch(deployments)
+        repo_id_branch_to_pr_list_map: Dict[Tuple[str, str], List[PullRequest]] = (
+            self._map_prs_to_repo_id_and_base_branch(pull_requests)
+        )
+        repo_id_branch_to_deployments_map: Dict[Tuple[str, str], List[Deployment]] = (
+            self._map_deployments_to_repo_id_and_head_branch(deployments)
+        )
 
         repo_id_to_deployments_with_pr_map: Dict[
             str, Dict[Deployment, List[PullRequest]]
@@ -76,9 +76,9 @@ def get_team_all_deployments_in_interval_with_related_prs(
             relevant_prs: List[PullRequest] = repo_id_branch_to_pr_list_map.get(
                 (repo_id, base_branch), []
             )
-            deployments_pr_map: Dict[
-                Deployment, List[PullRequest]
-            ] = self._map_prs_to_deployments(relevant_prs, deployments)
+            deployments_pr_map: Dict[Deployment, List[PullRequest]] = (
+                self._map_prs_to_deployments(relevant_prs, deployments)
+            )
 
             repo_id_to_deployments_with_pr_map[repo_id].update(deployments_pr_map)
 
@@ -137,9 +137,9 @@ def _map_prs_to_repo_id_and_base_branch(
     def _map_deployments_to_repo_id_and_head_branch(
         self, deployments: List[Deployment]
     ) -> Dict[Tuple[str, str], List[Deployment]]:
-        repo_id_branch_deployments_map: Dict[
-            Tuple[str, str], List[Deployment]
-        ] = defaultdict(list)
+        repo_id_branch_deployments_map: Dict[Tuple[str, str], List[Deployment]] = (
+            defaultdict(list)
+        )
         for deployment in deployments:
             repo_id = str(deployment.repo_id)
             head_branch = deployment.head_branch
@@ -182,9 +182,9 @@ def _get_deployment_frequency_from_date_to_deployment_map(
         This method takes a dict of datetime representing (day/week/month) to Deployments
         and returns avg deployment frequency
         """
-        date_to_deployment_count_map: Dict[
-            datetime, int
-        ] = get_key_to_count_map_from_key_to_list_map(date_to_deployment_map)
+        date_to_deployment_count_map: Dict[datetime, int] = (
+            get_key_to_count_map_from_key_to_list_map(date_to_deployment_map)
+        )
 
         return get_average_of_dict_values(date_to_deployment_count_map)
 
diff --git a/backend/analytics_server/mhq/service/deployments/deployment_service.py b/backend/analytics_server/mhq/service/deployments/deployment_service.py
index 76cd399b..6a268d1f 100644
--- a/backend/analytics_server/mhq/service/deployments/deployment_service.py
+++ b/backend/analytics_server/mhq/service/deployments/deployment_service.py
@@ -64,19 +64,19 @@ def get_filtered_team_repos_with_workflow_configured_deployments(
         Get team repos with workflow deployments configured.
         That is the repo has a workflow configured and team repo has deployment type as workflow.
         """
-        filtered_team_repos: List[
-            TeamRepos
-        ] = self._filter_team_repos_using_workflow_deployments(team_repos)
+        filtered_team_repos: List[TeamRepos] = (
+            self._filter_team_repos_using_workflow_deployments(team_repos)
+        )
 
         repo_ids = [str(tr.org_repo_id) for tr in filtered_team_repos]
         repo_id_to_team_repo_map = {
             str(tr.org_repo_id): tr for tr in filtered_team_repos
         }
 
-        repo_workflows: List[
-            RepoWorkflow
-        ] = self.workflow_repo_service.get_repo_workflow_by_repo_ids(
-            repo_ids, RepoWorkflowType.DEPLOYMENT
+        repo_workflows: List[RepoWorkflow] = (
+            self.workflow_repo_service.get_repo_workflow_by_repo_ids(
+                repo_ids, RepoWorkflowType.DEPLOYMENT
+            )
         )
         workflows_repo_ids = list(
             set([str(workflow.org_repo_id) for workflow in repo_workflows])
diff --git a/backend/analytics_server/mhq/service/deployments/pr_deployments_service.py b/backend/analytics_server/mhq/service/deployments/pr_deployments_service.py
index 48021250..44eb15ec 100644
--- a/backend/analytics_server/mhq/service/deployments/pr_deployments_service.py
+++ b/backend/analytics_server/mhq/service/deployments/pr_deployments_service.py
@@ -22,10 +22,10 @@ def __init__(
     def get_repos_successful_deployments_in_interval(
         self, repo_ids: List[str], interval: Interval, pr_filter: PRFilter
     ) -> List[Deployment]:
-        pull_requests: List[
-            PullRequest
-        ] = self.code_repo_service.get_prs_merged_in_interval(
-            repo_ids, interval, pr_filter=pr_filter
+        pull_requests: List[PullRequest] = (
+            self.code_repo_service.get_prs_merged_in_interval(
+                repo_ids, interval, pr_filter=pr_filter
+            )
         )
 
         return self.deployments_adapter.adapt_many(pull_requests)
diff --git a/backend/analytics_server/mhq/service/deployments/workflow_deployments_service.py b/backend/analytics_server/mhq/service/deployments/workflow_deployments_service.py
index cd05705a..04e010a8 100644
--- a/backend/analytics_server/mhq/service/deployments/workflow_deployments_service.py
+++ b/backend/analytics_server/mhq/service/deployments/workflow_deployments_service.py
@@ -30,10 +30,10 @@ def __init__(
     def get_repos_successful_deployments_in_interval(
         self, repo_ids: List[str], interval: Interval, workflow_filter: WorkflowFilter
     ) -> List[Deployment]:
-        repo_workflow_runs: List[
-            Tuple[RepoWorkflow, RepoWorkflowRuns]
-        ] = self.workflow_repo_service.get_successful_repo_workflows_runs_by_repo_ids(
-            repo_ids, interval, workflow_filter
+        repo_workflow_runs: List[Tuple[RepoWorkflow, RepoWorkflowRuns]] = (
+            self.workflow_repo_service.get_successful_repo_workflows_runs_by_repo_ids(
+                repo_ids, interval, workflow_filter
+            )
         )
 
         return self.deployments_adapter.adapt_many(repo_workflow_runs)
@@ -43,10 +43,10 @@ def get_repos_all_deployments_in_interval(
         interval: Interval,
         workflow_filter: WorkflowFilter,
     ) -> List[Deployment]:
-        repo_workflow_runs: List[
-            Tuple[RepoWorkflow, RepoWorkflowRuns]
-        ] = self.workflow_repo_service.get_repos_workflow_runs_by_repo_ids(
-            repo_ids, interval, workflow_filter
+        repo_workflow_runs: List[Tuple[RepoWorkflow, RepoWorkflowRuns]] = (
+            self.workflow_repo_service.get_repos_workflow_runs_by_repo_ids(
+                repo_ids, interval, workflow_filter
+            )
         )
 
         return self.deployments_adapter.adapt_many(repo_workflow_runs)
@@ -58,23 +58,23 @@ def get_pull_requests_related_to_deployment(
         )
         interval = Interval(previous_deployment.conducted_at, deployment.conducted_at)
         pr_base_branch: str = deployment.head_branch
-        pull_requests: List[
-            PullRequest
-        ] = self.code_repo_service.get_prs_merged_in_interval(
-            [deployment.repo_id], interval, base_branches=[pr_base_branch]
+        pull_requests: List[PullRequest] = (
+            self.code_repo_service.get_prs_merged_in_interval(
+                [deployment.repo_id], interval, base_branches=[pr_base_branch]
+            )
         )
-        relevant_prs: List[
-            PullRequest
-        ] = self.deployment_pr_mapping_service.get_all_prs_deployed(
-            pull_requests, deployment
+        relevant_prs: List[PullRequest] = (
+            self.deployment_pr_mapping_service.get_all_prs_deployed(
+                pull_requests, deployment
+            )
         )
 
         return relevant_prs
 
     def get_deployment_by_entity_id(self, entity_id: str) -> Deployment:
-        repo_workflow_run: Tuple[
-            RepoWorkflow, RepoWorkflowRuns
-        ] = self.workflow_repo_service.get_repo_workflow_run_by_id(entity_id)
+        repo_workflow_run: Tuple[RepoWorkflow, RepoWorkflowRuns] = (
+            self.workflow_repo_service.get_repo_workflow_run_by_id(entity_id)
+        )
         if not repo_workflow_run:
             raise ValueError(f"Workflow run with id {entity_id} not found")
         return self.deployments_adapter.adapt(repo_workflow_run)
@@ -86,7 +86,7 @@ def _get_previous_deployment_for_given_deployment(
             workflow_run,
             current_workflow_run,
         ) = self.workflow_repo_service.get_repo_workflow_run_by_id(deployment.entity_id)
-        workflow_run_previous_workflow_run: Tuple[
-            RepoWorkflow, RepoWorkflowRuns
-        ] = self.workflow_repo_service.get_previous_workflow_run(current_workflow_run)
+        workflow_run_previous_workflow_run: Tuple[RepoWorkflow, RepoWorkflowRuns] = (
+            self.workflow_repo_service.get_previous_workflow_run(current_workflow_run)
+        )
         return self.deployments_adapter.adapt(workflow_run_previous_workflow_run)
diff --git a/backend/analytics_server/mhq/service/incidents/incidents.py b/backend/analytics_server/mhq/service/incidents/incidents.py
index 99b14ce9..20b89e39 100644
--- a/backend/analytics_server/mhq/service/incidents/incidents.py
+++ b/backend/analytics_server/mhq/service/incidents/incidents.py
@@ -76,9 +76,9 @@ def get_deployment_incidents_map(
             current_deployment_incidents = []
 
             if incidents_pointer >= len(incidents):
-                deployment_incidents_map[
-                    current_deployment
-                ] = current_deployment_incidents
+                deployment_incidents_map[current_deployment] = (
+                    current_deployment_incidents
+                )
                 continue
 
             while incidents_pointer < len(incidents):
@@ -197,10 +197,10 @@ def _get_incidents_mean_time_to_recovery_trends(
         self, resolved_incidents: List[Incident], interval: Interval
     ) -> Dict[datetime, MeanTimeToRecoveryMetrics]:
 
-        weekly_resolved_team_incidents: Dict[
-            datetime, List[Incident]
-        ] = generate_expanded_buckets(
-            resolved_incidents, interval, "resolved_date", "weekly"
+        weekly_resolved_team_incidents: Dict[datetime, List[Incident]] = (
+            generate_expanded_buckets(
+                resolved_incidents, interval, "resolved_date", "weekly"
+            )
         )
 
         weekly_mean_time_to_recovery: Dict[datetime, MeanTimeToRecoveryMetrics] = {}
@@ -208,9 +208,9 @@ def _get_incidents_mean_time_to_recovery_trends(
 
        for week, incidents in weekly_resolved_team_incidents.items():
 
            if incidents:
-                weekly_mean_time_to_recovery[
-                    week
-                ] = self._get_incidents_mean_time_to_recovery(incidents)
+                weekly_mean_time_to_recovery[week] = (
+                    self._get_incidents_mean_time_to_recovery(incidents)
+                )
             else:
                 weekly_mean_time_to_recovery[week] = MeanTimeToRecoveryMetrics()
 
diff --git a/backend/analytics_server/mhq/service/incidents/integration.py b/backend/analytics_server/mhq/service/incidents/integration.py
index 9aaf72bf..fd10e09a 100644
--- a/backend/analytics_server/mhq/service/incidents/integration.py
+++ b/backend/analytics_server/mhq/service/incidents/integration.py
@@ -17,10 +17,10 @@ def __init__(
         self.settings_service = settings_service
 
     def get_org_providers(self, org_id: str) -> List[str]:
-        integrations: List[
-            Integration
-        ] = self.core_repo_service.get_org_integrations_for_names(
-            org_id, self._get_possible_incident_providers(org_id)
+        integrations: List[Integration] = (
+            self.core_repo_service.get_org_integrations_for_names(
+                org_id, self._get_possible_incident_providers(org_id)
+            )
         )
         if not integrations:
             return []
diff --git a/backend/analytics_server/mhq/service/incidents/sync/etl_git_incidents_handler.py b/backend/analytics_server/mhq/service/incidents/sync/etl_git_incidents_handler.py
index fd0cece3..2ef73c3c 100644
--- a/backend/analytics_server/mhq/service/incidents/sync/etl_git_incidents_handler.py
+++ b/backend/analytics_server/mhq/service/incidents/sync/etl_git_incidents_handler.py
@@ -92,10 +92,10 @@ def process_service_incidents(
         from_time: datetime = bookmark.bookmark
         to_time: datetime = time_now()
 
-        revert_pr_incidents: List[
-            RevertPRMap
-        ] = self.git_incidents_api_service.get_repo_revert_prs_in_interval(
-            incident_service.key, from_time, to_time
+        revert_pr_incidents: List[RevertPRMap] = (
+            self.git_incidents_api_service.get_repo_revert_prs_in_interval(
+                incident_service.key, from_time, to_time
+            )
         )
         if not revert_pr_incidents:
             LOG.warning(
@@ -152,12 +152,12 @@ def _process_revert_pr_incident(
         self, org_incident_service: OrgIncidentService, revert_pr_map: RevertPRMap
     ) -> Tuple[Incident, IncidentOrgIncidentServiceMap]:
         incident_unique_id = str(revert_pr_map.original_pr.id)
-        existing_incident: Optional[
-            Incident
-        ] = self.incidents_repo_service.get_incident_by_key_type_and_provider(
-            incident_unique_id,
-            IncidentType.REVERT_PR,
-            IncidentProvider(org_incident_service.provider),
+        existing_incident: Optional[Incident] = (
+            self.incidents_repo_service.get_incident_by_key_type_and_provider(
+                incident_unique_id,
+                IncidentType.REVERT_PR,
+                IncidentProvider(org_incident_service.provider),
+            )
         )
 
         incident_id = existing_incident.id if existing_incident else uuid4_str()
@@ -180,9 +180,9 @@ def _process_revert_pr_incident(
                 "created_at": revert_pr_map.revert_pr.created_at.isoformat(),
                 "updated_at": revert_pr_map.revert_pr.updated_at.isoformat(),
             },
-            created_at=existing_incident.created_at
-            if existing_incident
-            else time_now(),
+            created_at=(
+                existing_incident.created_at if existing_incident else time_now()
+            ),
             updated_at=time_now(),
             incident_type=IncidentType.REVERT_PR,
         )
@@ -206,9 +206,9 @@ def _adapt_org_incident_service(
             name=org_repo.name,
             key=str(org_repo.id),
             meta={},
-            created_at=org_incident_service.created_at
-            if org_incident_service
-            else time_now(),
+            created_at=(
+                org_incident_service.created_at if org_incident_service else time_now()
+            ),
             updated_at=time_now(),
             source_type=IncidentSource.GIT_REPO,
         )
@@ -226,9 +226,9 @@ def _adapt_pr_to_json(pr: PullRequest) -> Dict[str, any]:
         "url": pr.url,
         "base_branch": pr.base_branch,
         "head_branch": pr.head_branch,
-        "state_changed_at": pr.state_changed_at.isoformat()
-        if pr.state_changed_at
-        else None,
+        "state_changed_at": (
+            pr.state_changed_at.isoformat() if pr.state_changed_at else None
+        ),
         "commits": pr.commits,
         "comments": pr.comments,
         "provider": pr.provider,
diff --git a/backend/analytics_server/mhq/service/incidents/sync/etl_handler.py b/backend/analytics_server/mhq/service/incidents/sync/etl_handler.py
index 5481e6cd..a926a77a 100644
--- a/backend/analytics_server/mhq/service/incidents/sync/etl_handler.py
+++ b/backend/analytics_server/mhq/service/incidents/sync/etl_handler.py
@@ -91,9 +91,9 @@ def __get_incidents_bookmark(
 
 
 def sync_org_incidents(org_id: str):
-    incident_providers: List[
-        str
-    ] = get_incidents_integration_service().get_org_providers(org_id)
+    incident_providers: List[str] = (
+        get_incidents_integration_service().get_org_providers(org_id)
+    )
     if not incident_providers:
         LOG.info(f"No incident providers found for org {org_id}")
         return
diff --git a/backend/analytics_server/mhq/service/merge_to_deploy_broker/mtd_handler.py b/backend/analytics_server/mhq/service/merge_to_deploy_broker/mtd_handler.py
index 282789c5..ed147aab 100644
--- a/backend/analytics_server/mhq/service/merge_to_deploy_broker/mtd_handler.py
+++ b/backend/analytics_server/mhq/service/merge_to_deploy_broker/mtd_handler.py
@@ -54,9 +54,9 @@ def _process_deployments_for_merge_to_deploy_caching(self, repo_id: str):
         if not org_repo:
             Exception(f"Repo with {repo_id} not found")
 
-        repo_workflows: List[
-            RepoWorkflow
-        ] = self.workflow_repo_service.get_repo_workflows_by_repo_id(repo_id)
+        repo_workflows: List[RepoWorkflow] = (
+            self.workflow_repo_service.get_repo_workflows_by_repo_id(repo_id)
+        )
         if not repo_workflows:
             return
@@ -68,10 +68,10 @@ def _process_deployments_for_merge_to_deploy_caching(self, repo_id: str):
 
         bookmark_time: datetime = broker_bookmark.bookmark_date
 
-        repo_workflow_runs: List[
-            RepoWorkflowRuns
-        ] = self.workflow_repo_service.get_repo_workflow_runs_conducted_after_time(
-            repo_id, bookmark_time, DEPLOYMENTS_TO_PROCESS
+        repo_workflow_runs: List[RepoWorkflowRuns] = (
+            self.workflow_repo_service.get_repo_workflow_runs_conducted_after_time(
+                repo_id, bookmark_time, DEPLOYMENTS_TO_PROCESS
+            )
         )
 
         if not repo_workflow_runs:
@@ -99,15 +99,15 @@ def _cache_prs_merge_to_deploy_for_repo_workflow_run(
             return
 
         conducted_at: datetime = repo_workflow_run.conducted_at
-        relevant_prs: List[
-            PullRequest
-        ] = self.code_repo_service.get_prs_in_repo_merged_before_given_date_with_merge_to_deploy_as_null(
-            repo_id, conducted_at
+        relevant_prs: List[PullRequest] = (
+            self.code_repo_service.get_prs_in_repo_merged_before_given_date_with_merge_to_deploy_as_null(
+                repo_id, conducted_at
+            )
         )
-        prs_to_update: List[
-            PullRequest
-        ] = self.deployment_pr_mapper_service.get_all_prs_deployed(
-            relevant_prs, repo_workflow_run
+        prs_to_update: List[PullRequest] = (
+            self.deployment_pr_mapper_service.get_all_prs_deployed(
+                relevant_prs, repo_workflow_run
+            )
         )
 
         for pr in prs_to_update:
diff --git a/backend/analytics_server/mhq/service/settings/configuration_settings.py b/backend/analytics_server/mhq/service/settings/configuration_settings.py
index fb229386..eba23ecf 100644
--- a/backend/analytics_server/mhq/service/settings/configuration_settings.py
+++ b/backend/analytics_server/mhq/service/settings/configuration_settings.py
@@ -311,10 +311,10 @@ def get_settings_map(
         settings: List[Settings] = self._settings_repo.get_settings(
             entity_id=entity_id, setting_types=setting_types, entity_type=entity_type
         )
-        setting_type_to_setting_map: Dict[
-            SettingType, Any
-        ] = self._get_setting_type_to_setting_map(
-            setting_types, settings, ignore_default_setting_type
+        setting_type_to_setting_map: Dict[SettingType, Any] = (
+            self._get_setting_type_to_setting_map(
+                setting_types, settings, ignore_default_setting_type
+            )
         )
 
         return setting_type_to_setting_map
@@ -331,9 +331,9 @@ def _get_setting_type_to_setting_map(
         setting_type_to_setting_map: Dict[SettingType, Any] = {}
 
         for setting in settings:
-            setting_type_to_setting_map[
-                setting.setting_type
-            ] = self._adapt_config_setting_from_db_setting(setting).specific_settings
+            setting_type_to_setting_map[setting.setting_type] = (
+                self._adapt_config_setting_from_db_setting(setting).specific_settings
+            )
 
         for setting_type in setting_types:
             if (setting_type not in setting_type_to_setting_map) and (
diff --git a/backend/analytics_server/mhq/service/workflows/integration.py b/backend/analytics_server/mhq/service/workflows/integration.py
index 8807c71a..5a9aba7b 100644
--- a/backend/analytics_server/mhq/service/workflows/integration.py
+++ b/backend/analytics_server/mhq/service/workflows/integration.py
@@ -14,10 +14,10 @@ def __init__(self, core_repo_service: CoreRepoService):
         self.core_repo_service = core_repo_service
 
     def get_org_providers(self, org_id: str) -> List[str]:
-        integrations: List[
-            Integration
-        ] = self.core_repo_service.get_org_integrations_for_names(
-            org_id, WORKFLOW_INTEGRATION_BUCKET
+        integrations: List[Integration] = (
+            self.core_repo_service.get_org_integrations_for_names(
+                org_id, WORKFLOW_INTEGRATION_BUCKET
+            )
         )
         if not integrations:
             return []
diff --git a/backend/analytics_server/mhq/service/workflows/sync/etl_handler.py b/backend/analytics_server/mhq/service/workflows/sync/etl_handler.py
index a5c0cb99..f5181488 100644
--- a/backend/analytics_server/mhq/service/workflows/sync/etl_handler.py
+++ b/backend/analytics_server/mhq/service/workflows/sync/etl_handler.py
@@ -37,9 +37,9 @@ def __init__(
         self.etl_factory = etl_factory
 
     def sync_org_workflows(self, org_id: str):
-        active_repo_workflows: List[
-            Tuple[OrgRepo, RepoWorkflow]
-        ] = self._get_active_repo_workflows(org_id)
+        active_repo_workflows: List[Tuple[OrgRepo, RepoWorkflow]] = (
+            self._get_active_repo_workflows(org_id)
+        )
 
         for org_repo, repo_workflow in active_repo_workflows:
             try:
@@ -56,9 +56,9 @@ def _get_active_repo_workflows(
         code_providers: List[str] = get_code_integration_service().get_org_providers(
             org_id
         )
-        workflow_providers: List[
-            str
-        ] = get_workflows_integrations_service().get_org_providers(org_id)
+        workflow_providers: List[str] = (
+            get_workflows_integrations_service().get_org_providers(org_id)
+        )
         if not code_providers or not workflow_providers:
             LOG.info(f"No workflow integrations found for org {org_id}")
             return []
@@ -66,11 +66,11 @@ def _get_active_repo_workflows(
         org_repos: List[OrgRepo] = self.code_repo_service.get_active_org_repos(org_id)
         repo_ids = [str(repo.id) for repo in org_repos]
         repo_id_org_repo_map = {str(repo.id): repo for repo in org_repos}
-        active_repo_workflows: List[
-            RepoWorkflow
-        ] = self.workflow_repo_service.get_active_repo_workflows_by_repo_ids_and_providers(
-            repo_ids,
-            [RepoWorkflowProviders(provider) for provider in workflow_providers],
+        active_repo_workflows: List[RepoWorkflow] = (
+            self.workflow_repo_service.get_active_repo_workflows_by_repo_ids_and_providers(
+                repo_ids,
+                [RepoWorkflowProviders(provider) for provider in workflow_providers],
+            )
         )
         org_repo_workflows: List[Tuple[OrgRepo, RepoWorkflow]] = []
         for repo_workflow in active_repo_workflows:
@@ -126,9 +126,9 @@ def __get_repo_workflow_bookmark(
 
 
 def sync_org_workflows(org_id: str):
-    workflow_providers: List[
-        str
-    ] = get_workflows_integrations_service().get_org_providers(org_id)
+    workflow_providers: List[str] = (
+        get_workflows_integrations_service().get_org_providers(org_id)
+    )
     if not workflow_providers:
         LOG.info(f"No workflow integrations found for org {org_id}")
         return
diff --git a/backend/analytics_server/mhq/store/repos/code.py b/backend/analytics_server/mhq/store/repos/code.py
index 430bbb4f..5221b4f5 100644
--- a/backend/analytics_server/mhq/store/repos/code.py
+++ b/backend/analytics_server/mhq/store/repos/code.py
@@ -63,9 +63,11 @@ def update_team_repos(self, team: Team, org_repos: List[OrgRepo]):
             team_repo = TeamRepos(
                 team_id=team.id,
                 org_repo_id=str(repo.id),
-                prod_branches=["^" + repo.default_branch + "$"]
-                if repo.default_branch
-                else None,
+                prod_branches=(
+                    ["^" + repo.default_branch + "$"]
+                    if repo.default_branch
+                    else None
+                ),
             )
             updated_team_repos.append(team_repo)
 
diff --git a/backend/analytics_server/mhq/utils/time.py b/backend/analytics_server/mhq/utils/time.py
index 6e8b7ceb..3b22882f 100644
--- a/backend/analytics_server/mhq/utils/time.py
+++ b/backend/analytics_server/mhq/utils/time.py
@@ -261,9 +261,9 @@ def fill_missing_week_buckets(
                 callable_class() if callable_class else None
             )
         else:
-            week_start_to_object_map_with_weeks_in_interval[
-                curr_day
-            ] = week_start_to_object_map[curr_day]
+            week_start_to_object_map_with_weeks_in_interval[curr_day] = (
+                week_start_to_object_map[curr_day]
+            )
 
         curr_day = curr_day + timedelta(days=7)
 
diff --git a/backend/analytics_server/tests/factories/models/code.py b/backend/analytics_server/tests/factories/models/code.py
index 47d06e2d..793dc262 100644
--- a/backend/analytics_server/tests/factories/models/code.py
+++ b/backend/analytics_server/tests/factories/models/code.py
@@ -67,9 +67,9 @@ def get_pull_request(
         merge_time=merge_time,
         cycle_time=cycle_time,
         merge_to_deploy=merge_to_deploy,
-        reviewers=reviewers
-        if reviewers is not None
-        else ["randomuser1", "randomuser2"],
+        reviewers=(
+            reviewers if reviewers is not None else ["randomuser1", "randomuser2"]
+        ),
         meta=meta or {},
         url=url,
         merge_commit_sha=merge_commit_sha,
@@ -91,13 +91,15 @@ def get_pull_request_event(
         id=id or uuid4(),
         pull_request_id=pull_request_id or uuid4(),
         type=type or PullRequestEventType.REVIEW.value,
-        data={
-            "user": {"login": reviewer or "User"},
-            "state": state or "APPROVED",
-            "author_association": "NONE",
-        }
-        if not data
-        else data,
+        data=(
+            {
+                "user": {"login": reviewer or "User"},
+                "state": state or "APPROVED",
+                "author_association": "NONE",
+            }
+            if not data
+            else data
+        ),
         created_at=created_at or time_now(),
         idempotency_key=idempotency_key or str(randint(10, 100)),
         org_repo_id=org_repo_id or uuid4(),
diff --git a/backend/dev-requirements.txt b/backend/dev-requirements.txt
index 4e27da9f..dd4a42da 100644
--- a/backend/dev-requirements.txt
+++ b/backend/dev-requirements.txt
@@ -1,3 +1,3 @@
 pytest==7.1.1
-black==22.3.0
+black==24.3.0
 pre-commit==2.20.0
\ No newline at end of file
diff --git a/backend/dev_scripts/make_new_setting.py b/backend/dev_scripts/make_new_setting.py
index 1fd50278..2b3ffb27 100644
--- a/backend/dev_scripts/make_new_setting.py
+++ b/backend/dev_scripts/make_new_setting.py
@@ -1,6 +1,7 @@
 """
 This script can be used to generate code for a new setting.
 """
+
 import re
 from typing import List, Tuple
 
diff --git a/backend/requirements.txt b/backend/requirements.txt
index dd878331..52e81f6a 100644
--- a/backend/requirements.txt
+++ b/backend/requirements.txt
@@ -4,11 +4,11 @@ stringcase==1.2.0
 SQLAlchemy==2.0.29
 pytz==2022.1
 PyGithub==1.55
-pycryptodome==3.14.1
-aiohttp==3.8.4
+pycryptodome==3.19.1
+aiohttp==3.9.4
 redis==5.0.3
 python-redis-lock==4.0.0
 psycopg2==2.9.3
 python-dotenv==1.0.1
-gunicorn==21.0.1
+gunicorn==22.0.0
 Flask-SQLAlchemy==3.1.1
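
Reviewer note: apart from the pinned dependency bumps, every Python hunk above is mechanical output of the Black 24.x stable style, which keeps a long annotated assignment's annotation on one line and parenthesizes the right-hand side instead of splitting the type subscript. A minimal before/after sketch, lifted from the first hunk in deployment_analytics.py:

    # Black 22.x output -- the annotation's subscript is split across lines:
    prs: List[
        PullRequest
    ] = deployments_service.get_pull_requests_related_to_deployment(deployment)

    # Black 24.x output -- the annotation stays intact and the RHS is wrapped
    # in parentheses, which is the pattern repeated throughout this diff:
    prs: List[PullRequest] = (
        deployments_service.get_pull_requests_related_to_deployment(deployment)
    )

The same rule drives the conditional-expression hunks (e.g. "resolved_date", "author", "prod_branches"), where the ternary is now wrapped in parentheses rather than hanging off the keyword argument.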