Commit

Merge pull request #419 from middlewarehq/dependabot/pip/backend/pip-ec4054ff4d

Bump the pip group across 1 directory with 4 updates
amoghjalan authored Jun 18, 2024
2 parents 8c314eb + ebeb22e commit 5c87056
Showing 30 changed files with 231 additions and 218 deletions.
2 changes: 1 addition & 1 deletion .github/workflows/black.yml
@@ -27,7 +27,7 @@ jobs:
           python-version: "3.11.6"

       - name: Install Black
-        run: python -m pip install black==22.3.0
+        run: python -m pip install black==24.3.0

       - name: Run Black Check
         run: black . --check
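
The version bump above is the only hand-written change in this commit. Every remaining hunk is mechanical reformatting from Black's 2024 stable style, which 24.x applies by default: an annotated assignment that overflows the line limit is now split by parenthesizing its right-hand side instead of splitting inside the annotation's subscript, and multi-line conditional expressions used as values are wrapped in parentheses. A schematic before/after with placeholder names (not code from this repo):

    # black 22.x: splits inside the subscript of the annotation
    items: List[
        SomeLongTypeName
    ] = some_service.fetch_items_for_team(team_id, interval, some_filter)

    # black 24.x: keeps the annotation intact and parenthesizes the value
    items: List[SomeLongTypeName] = (
        some_service.fetch_items_for_team(team_id, interval, some_filter)
    )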
14 changes: 7 additions & 7 deletions backend/analytics_server/mhq/api/deployment_analytics.py
@@ -127,9 +127,9 @@ def get_prs_included_in_deployment(deployment_id: str):

     repo: OrgRepo = pr_analytics_service.get_repo_by_id(deployment.repo_id)

-    prs: List[
-        PullRequest
-    ] = deployments_service.get_pull_requests_related_to_deployment(deployment)
+    prs: List[PullRequest] = (
+        deployments_service.get_pull_requests_related_to_deployment(deployment)
+    )
     repo_id_map = {repo.id: repo}

     return get_non_paginated_pr_response(
@@ -204,10 +204,10 @@ def get_team_deployment_frequency_trends(

     deployments_analytics_service = get_deployment_analytics_service()

-    week_to_deployments_count_map: Dict[
-        datetime, int
-    ] = deployments_analytics_service.get_weekly_deployment_frequency_trends(
-        team_id, interval, pr_filter, workflow_filter
+    week_to_deployments_count_map: Dict[datetime, int] = (
+        deployments_analytics_service.get_weekly_deployment_frequency_trends(
+            team_id, interval, pr_filter, workflow_filter
+        )
     )

     return {
14 changes: 7 additions & 7 deletions backend/analytics_server/mhq/api/incidents.py
@@ -92,9 +92,9 @@ def get_deployments_with_related_incidents(

     incidents: List[Incident] = incident_service.get_team_incidents(team_id, interval)

-    deployment_incidents_map: Dict[
-        Deployment, List[Incident]
-    ] = incident_service.get_deployment_incidents_map(deployments, incidents)
+    deployment_incidents_map: Dict[Deployment, List[Incident]] = (
+        incident_service.get_deployment_incidents_map(deployments, incidents)
+    )

     return list(
         map(
@@ -238,10 +238,10 @@ def get_team_cfr_trends(

     incidents: List[Incident] = incident_service.get_team_incidents(team_id, interval)

-    team_weekly_change_failure_rate: Dict[
-        datetime, ChangeFailureRateMetrics
-    ] = incident_service.get_weekly_change_failure_rate(
-        interval, deployments, incidents
+    team_weekly_change_failure_rate: Dict[datetime, ChangeFailureRateMetrics] = (
+        incident_service.get_weekly_change_failure_rate(
+            interval, deployments, incidents
+        )
     )

     return {
6 changes: 3 additions & 3 deletions backend/analytics_server/mhq/api/pull_requests.py
@@ -159,9 +159,9 @@ def get_team_lead_time_trends(

     lead_time_service = get_lead_time_service()

-    weekly_lead_time_metrics_avg_map: Dict[
-        datetime, LeadTimeMetrics
-    ] = lead_time_service.get_team_lead_time_metrics_trends(team, interval, pr_filter)
+    weekly_lead_time_metrics_avg_map: Dict[datetime, LeadTimeMetrics] = (
+        lead_time_service.get_team_lead_time_metrics_trends(team, interval, pr_filter)
+    )

     return {
         week.isoformat(): adapt_lead_time_metrics(average_lead_time_metrics)
14 changes: 8 additions & 6 deletions backend/analytics_server/mhq/api/resources/incident_resources.py
@@ -22,12 +22,14 @@ def adapt_incident(
         "provider": incident.provider,
         "status": incident.status,
         "creation_date": incident.creation_date.isoformat(),
-        "resolved_date": incident.resolved_date.isoformat()
-        if incident.resolved_date
-        else None,
-        "acknowledged_date": incident.acknowledged_date.isoformat()
-        if incident.acknowledged_date
-        else None,
+        "resolved_date": (
+            incident.resolved_date.isoformat() if incident.resolved_date else None
+        ),
+        "acknowledged_date": (
+            incident.acknowledged_date.isoformat()
+            if incident.acknowledged_date
+            else None
+        ),
         "assigned_to": adapt_user_info(incident.assigned_to, username_user_map),
         "assignees": list(
             map(
8 changes: 4 additions & 4 deletions backend/analytics_server/mhq/exapi/git_incidents.py
@@ -40,10 +40,10 @@ def get_org_repo(self, repo_id: str):
     def get_repo_revert_prs_in_interval(
         self, repo_id: str, from_time: datetime, to_time: datetime
     ) -> List[RevertPRMap]:
-        revert_pr_mappings: List[
-            PullRequestRevertPRMapping
-        ] = self.code_repo_service.get_repo_revert_prs_mappings_updated_in_interval(
-            repo_id, from_time, to_time
+        revert_pr_mappings: List[PullRequestRevertPRMapping] = (
+            self.code_repo_service.get_repo_revert_prs_mappings_updated_in_interval(
+                repo_id, from_time, to_time
+            )
         )

         revert_pr_ids = [str(pr.pr_id) for pr in revert_pr_mappings]
8 changes: 4 additions & 4 deletions backend/analytics_server/mhq/service/code/integration.py
@@ -13,10 +13,10 @@ def __init__(self, core_repo_service: CoreRepoService):
         self.core_repo_service = core_repo_service

     def get_org_providers(self, org_id: str) -> List[str]:
-        integrations: List[
-            Integration
-        ] = self.core_repo_service.get_org_integrations_for_names(
-            org_id, CODE_INTEGRATION_BUCKET
+        integrations: List[Integration] = (
+            self.core_repo_service.get_org_integrations_for_names(
+                org_id, CODE_INTEGRATION_BUCKET
+            )
         )
         if not integrations:
             return []
30 changes: 16 additions & 14 deletions backend/analytics_server/mhq/service/code/lead_time.py
@@ -54,15 +54,15 @@ def get_team_lead_time_metrics_trends(
             set(self._get_team_repos_lead_time_metrics(team_repos, interval, pr_filter))
         )

-        weekly_lead_time_metrics_map: Dict[
-            datetime, List[LeadTimeMetrics]
-        ] = generate_expanded_buckets(
-            lead_time_metrics, interval, "merged_at", "weekly"
+        weekly_lead_time_metrics_map: Dict[datetime, List[LeadTimeMetrics]] = (
+            generate_expanded_buckets(
+                lead_time_metrics, interval, "merged_at", "weekly"
+            )
         )

-        weekly_lead_time_metrics_avg_map: Dict[
-            datetime, LeadTimeMetrics
-        ] = self.get_avg_lead_time_metrics_from_map(weekly_lead_time_metrics_map)
+        weekly_lead_time_metrics_avg_map: Dict[datetime, LeadTimeMetrics] = (
+            self.get_avg_lead_time_metrics_from_map(weekly_lead_time_metrics_map)
+        )

         weekly_lead_time_metrics_avg_map = fill_missing_week_buckets(
             weekly_lead_time_metrics_avg_map, interval, LeadTimeMetrics
@@ -176,10 +176,10 @@ def _get_lead_time_prs_for_repos_using_workflow_deployments(
         pr_filter: PRFilter = None,
     ) -> List[PullRequest]:

-        team_repos_with_workflow_deployments_configured: List[
-            TeamRepos
-        ] = self._deployments_service.get_filtered_team_repos_with_workflow_configured_deployments(
-            team_repos
+        team_repos_with_workflow_deployments_configured: List[TeamRepos] = (
+            self._deployments_service.get_filtered_team_repos_with_workflow_configured_deployments(
+                team_repos
+            )
         )

         repo_ids = [
@@ -214,9 +214,11 @@ def _get_lead_time_prs_for_repos_using_pr_deployments(

     def _get_lead_time_metrics_for_pr(self, pr: PullRequest) -> LeadTimeMetrics:
         return LeadTimeMetrics(
-            first_commit_to_open=pr.first_commit_to_open
-            if pr.first_commit_to_open is not None and pr.first_commit_to_open > 0
-            else 0,
+            first_commit_to_open=(
+                pr.first_commit_to_open
+                if pr.first_commit_to_open is not None and pr.first_commit_to_open > 0
+                else 0
+            ),
             first_response_time=pr.first_response_time if pr.first_response_time else 0,
             rework_time=pr.rework_time if pr.rework_time else 0,
             merge_time=pr.merge_time if pr.merge_time else 0,
@@ -194,9 +194,9 @@ def _adapt_org_incident_service(
             name=org_repo.name,
             key=str(org_repo.id),
             meta={},
-            created_at=org_incident_service.created_at
-            if org_incident_service
-            else time_now(),
+            created_at=(
+                org_incident_service.created_at if org_incident_service else time_now()
+            ),
             updated_at=time_now(),
             source_type=IncidentSource.GIT_REPO,
         )
@@ -92,9 +92,11 @@ def get_pr_performance(pr: PullRequest, pr_events: [PullRequestEvent]):
     cycle_time = cycle_time.total_seconds()

     return PRPerformance(
-        first_review_time=(first_review.created_at - pr.created_at).total_seconds()
-        if first_review
-        else -1,
+        first_review_time=(
+            (first_review.created_at - pr.created_at).total_seconds()
+            if first_review
+            else -1
+        ),
         rework_time=rework_time,
         merge_time=merge_time,
         cycle_time=cycle_time if pr.state == PullRequestState.MERGED else -1,
@@ -166,9 +166,9 @@ def process_pr(
         pr_model: Optional[PullRequest] = self.code_repo_service.get_repo_pr_by_number(
             repo_id, pr.number
         )
-        pr_event_model_list: List[
-            PullRequestEvent
-        ] = self.code_repo_service.get_pr_events(pr_model)
+        pr_event_model_list: List[PullRequestEvent] = (
+            self.code_repo_service.get_pr_events(pr_model)
+        )
         pr_commits_model_list: List = []

         reviews: List[GithubPullRequestReview] = list(self._api.get_pr_reviews(pr))
@@ -340,9 +340,11 @@ def _to_pr_commits(
                 url=commit["html_url"],
                 data=commit,
                 message=commit["commit"]["message"],
-                author=commit["author"]["login"]
-                if commit.get("author")
-                else commit["commit"].get("committer", {}).get("email", ""),
+                author=(
+                    commit["author"]["login"]
+                    if commit.get("author")
+                    else commit["commit"].get("committer", {}).get("email", "")
+                ),
                 created_at=self._dt_from_github_dt_string(
                     commit["commit"]["committer"]["date"]
                 ),
@@ -70,10 +70,10 @@ def _get_revert_pr_mapping_for_original_prs(
         if len(pr_numbers_match_strings) == 0:
             return []

-        revert_prs: List[
-            PullRequest
-        ] = self.code_repo_service.get_prs_by_head_branch_match_strings(
-            list(repo_ids), pr_numbers_match_strings
+        revert_prs: List[PullRequest] = (
+            self.code_repo_service.get_prs_by_head_branch_match_strings(
+                list(repo_ids), pr_numbers_match_strings
+            )
         )

         revert_pr_mappings: List[PullRequestRevertPRMapping] = []
@@ -136,10 +136,10 @@ def _get_revert_pr_mapping_for_revert_prs(
         if len(revert_pr_numbers) == 0:
             return []

-        reverted_prs: List[
-            PullRequest
-        ] = self.code_repo_service.get_reverted_prs_by_numbers(
-            list(repo_ids), revert_pr_numbers
+        reverted_prs: List[PullRequest] = (
+            self.code_repo_service.get_reverted_prs_by_numbers(
+                list(repo_ids), revert_pr_numbers
+            )
         )

         revert_pr_mappings: List[PullRequestRevertPRMapping] = []
46 changes: 23 additions & 23 deletions backend/analytics_server/mhq/service/deployments/analytics.py
@@ -43,27 +43,27 @@ def get_team_all_deployments_in_interval_with_related_prs(
         related pull requests. Each deployment is associated with a list of pull requests that contributed to it.
         """

-        deployments: List[
-            Deployment
-        ] = self.deployments_service.get_team_all_deployments_in_interval(
-            team_id, interval, pr_filter, workflow_filter
+        deployments: List[Deployment] = (
+            self.deployments_service.get_team_all_deployments_in_interval(
+                team_id, interval, pr_filter, workflow_filter
+            )
         )

         team_repos: List[TeamRepos] = self._get_team_repos_by_team_id(team_id)
         repo_ids: List[str] = [str(team_repo.org_repo_id) for team_repo in team_repos]

-        pull_requests: List[
-            PullRequest
-        ] = self.code_repo_service.get_prs_merged_in_interval(
-            repo_ids, interval, pr_filter
+        pull_requests: List[PullRequest] = (
+            self.code_repo_service.get_prs_merged_in_interval(
+                repo_ids, interval, pr_filter
+            )
         )

-        repo_id_branch_to_pr_list_map: Dict[
-            Tuple[str, str], List[PullRequest]
-        ] = self._map_prs_to_repo_id_and_base_branch(pull_requests)
-        repo_id_branch_to_deployments_map: Dict[
-            Tuple[str, str], List[Deployment]
-        ] = self._map_deployments_to_repo_id_and_head_branch(deployments)
+        repo_id_branch_to_pr_list_map: Dict[Tuple[str, str], List[PullRequest]] = (
+            self._map_prs_to_repo_id_and_base_branch(pull_requests)
+        )
+        repo_id_branch_to_deployments_map: Dict[Tuple[str, str], List[Deployment]] = (
+            self._map_deployments_to_repo_id_and_head_branch(deployments)
+        )

         repo_id_to_deployments_with_pr_map: Dict[
             str, Dict[Deployment, List[PullRequest]]
@@ -76,9 +76,9 @@ def get_team_all_deployments_in_interval_with_related_prs(
             relevant_prs: List[PullRequest] = repo_id_branch_to_pr_list_map.get(
                 (repo_id, base_branch), []
             )
-            deployments_pr_map: Dict[
-                Deployment, List[PullRequest]
-            ] = self._map_prs_to_deployments(relevant_prs, deployments)
+            deployments_pr_map: Dict[Deployment, List[PullRequest]] = (
+                self._map_prs_to_deployments(relevant_prs, deployments)
+            )

             repo_id_to_deployments_with_pr_map[repo_id].update(deployments_pr_map)

@@ -137,9 +137,9 @@ def _map_prs_to_repo_id_and_base_branch(
     def _map_deployments_to_repo_id_and_head_branch(
         self, deployments: List[Deployment]
     ) -> Dict[Tuple[str, str], List[Deployment]]:
-        repo_id_branch_deployments_map: Dict[
-            Tuple[str, str], List[Deployment]
-        ] = defaultdict(list)
+        repo_id_branch_deployments_map: Dict[Tuple[str, str], List[Deployment]] = (
+            defaultdict(list)
+        )
         for deployment in deployments:
             repo_id = str(deployment.repo_id)
             head_branch = deployment.head_branch
@@ -182,9 +182,9 @@ def _get_deployment_frequency_from_date_to_deployment_map(
         This method takes a dict of datetime representing (day/week/month) to Deployments and returns avg deployment frequency
         """

-        date_to_deployment_count_map: Dict[
-            datetime, int
-        ] = get_key_to_count_map_from_key_to_list_map(date_to_deployment_map)
+        date_to_deployment_count_map: Dict[datetime, int] = (
+            get_key_to_count_map_from_key_to_list_map(date_to_deployment_map)
+        )

         return get_average_of_dict_values(date_to_deployment_count_map)

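For orientation on the hunk above: the helper names suggest the frequency metric collapses each date bucket to its deployment count, then averages the counts. A minimal sketch of that behavior, assuming semantics implied by the names rather than the repo's actual implementation:

    from datetime import datetime
    from typing import Dict

    def get_key_to_count_map_from_key_to_list_map(
        key_to_list_map: Dict[datetime, list],
    ) -> Dict[datetime, int]:
        # Collapse each (day/week/month) bucket to the number of deployments in it.
        return {key: len(values) for key, values in key_to_list_map.items()}

    def get_average_of_dict_values(key_to_count_map: Dict[datetime, int]) -> float:
        # Average the per-bucket counts; an empty map averages to 0.
        if not key_to_count_map:
            return 0.0
        return sum(key_to_count_map.values()) / len(key_to_count_map)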
@@ -64,19 +64,19 @@ def get_filtered_team_repos_with_workflow_configured_deployments(
         Get team repos with workflow deployments configured.
         That is the repo has a workflow configured and team repo has deployment type as workflow.
         """
-        filtered_team_repos: List[
-            TeamRepos
-        ] = self._filter_team_repos_using_workflow_deployments(team_repos)
+        filtered_team_repos: List[TeamRepos] = (
+            self._filter_team_repos_using_workflow_deployments(team_repos)
+        )

         repo_ids = [str(tr.org_repo_id) for tr in filtered_team_repos]
         repo_id_to_team_repo_map = {
             str(tr.org_repo_id): tr for tr in filtered_team_repos
         }

-        repo_workflows: List[
-            RepoWorkflow
-        ] = self.workflow_repo_service.get_repo_workflow_by_repo_ids(
-            repo_ids, RepoWorkflowType.DEPLOYMENT
+        repo_workflows: List[RepoWorkflow] = (
+            self.workflow_repo_service.get_repo_workflow_by_repo_ids(
+                repo_ids, RepoWorkflowType.DEPLOYMENT
+            )
         )
         workflows_repo_ids = list(
             set([str(workflow.org_repo_id) for workflow in repo_workflows])
@@ -22,10 +22,10 @@ def __init__(
     def get_repos_successful_deployments_in_interval(
         self, repo_ids: List[str], interval: Interval, pr_filter: PRFilter
     ) -> List[Deployment]:
-        pull_requests: List[
-            PullRequest
-        ] = self.code_repo_service.get_prs_merged_in_interval(
-            repo_ids, interval, pr_filter=pr_filter
+        pull_requests: List[PullRequest] = (
+            self.code_repo_service.get_prs_merged_in_interval(
+                repo_ids, interval, pr_filter=pr_filter
+            )
         )

         return self.deployments_adapter.adapt_many(pull_requests)