This repository has been archived by the owner on Jun 9, 2023. It is now read-only.

Merge pull request #557 from xtuchyna/fix/kebechet-metrics-logging
Fix kebechet adviser metrics, fix pre-commit, update pyproject.toml
sesheta authored Apr 6, 2022
2 parents 0fa0eaa + 307aad8 commit 5e38c8a
Showing 9 changed files with 461 additions and 248 deletions.
6 changes: 3 additions & 3 deletions .pre-commit-config.yaml
@@ -1,7 +1,7 @@
---
repos:
- repo: https://github.com/Lucas-C/pre-commit-hooks
-rev: v1.1.12
+rev: v1.1.13
hooks:
- id: remove-tabs

@@ -31,14 +31,14 @@ repos:
- id: pydocstyle

- repo: https://github.com/pre-commit/mirrors-mypy
-rev: v0.931
+rev: v0.942
hooks:
- id: mypy
exclude: '^(docs|tasks|tests)|setup\.py'
args: [--ignore-missing-imports, --no-strict-optional]

- repo: https://github.com/psf/black
-rev: 22.1.0
+rev: 22.3.0
hooks:
- id: black
args: ['--target-version', 'py38']
2 changes: 1 addition & 1 deletion Pipfile
@@ -22,7 +22,7 @@ hypothesis-auto = "*"
hypothesis = "*"
pytest-mypy = "*"
mypy = "*"
pre-commit = "*"
pre-commit = "==2.15.0"
twine = "*"
autopep8 = "*"
rope = "*"
395 changes: 199 additions & 196 deletions Pipfile.lock

Large diffs are not rendered by default.

5 changes: 4 additions & 1 deletion pyproject.toml
@@ -1,6 +1,9 @@
+[build-system]
+requires = ["setuptools", "wheel"]

[tool.black]
line-length = 120
-py38 = true
+target-version = ['py38']
include = '\.pyi?$'
exclude = '''
/(
26 changes: 12 additions & 14 deletions srcopsmetrics/cli.py
@@ -20,7 +20,6 @@
import logging
import os
from datetime import date, timedelta
-from pathlib import Path
from typing import List, Optional

import click
@@ -30,8 +29,6 @@
from srcopsmetrics.enums import EntityTypeEnum, StoragePath
from srcopsmetrics.github_knowledge import GitHubKnowledge
from srcopsmetrics.kebechet_metrics import KebechetMetrics
-from srcopsmetrics.metrics import Metrics
-from srcopsmetrics.storage import KnowledgeStorage

_LOGGER = logging.getLogger("aicoe-src-ops-metrics")
logging.basicConfig(level=logging.INFO)
@@ -125,7 +122,7 @@ def get_entities_as_list(entities_raw: Optional[str]) -> List[str]:
)
@click.option(
"--metrics",
"-m",
"-x",
is_flag=True,
required=False,
help="""Launch Metrics Calculation for specified repository.""",
@@ -191,20 +188,21 @@ def cli(
kebechet_metrics = KebechetMetrics(repository=repo, day=yesterday, is_local=is_local)
kebechet_metrics.evaluate_and_store_kebechet_metrics()

-if metrics:
-repo_metrics = Metrics(repository=repos[0], visualize=visualize_statistics)
+# TODO metrics class not working
+# if metrics:
+# repo_metrics = Metrics(repository=repos[0], visualize=visualize_statistics)

-repo_metrics.get_metrics_outliers_pull_requests()
-repo_metrics.get_metrics_outliers_issues()
+# repo_metrics.get_metrics_outliers_pull_requests()
+# repo_metrics.get_metrics_outliers_issues()

-scores = repo_metrics.evaluate_scores_for_pull_requests()
+# scores = repo_metrics.evaluate_scores_for_pull_requests()

path = Path(f"./srcopsmetrics/metrics/{repos[0]}/pr_scores.json")
KnowledgeStorage(is_local=is_local).save_knowledge(file_path=path, data=scores)
# path = Path(f"./srcopsmetrics/metrics/{repos[0]}/pr_scores.json")
# KnowledgeStorage(is_local=is_local).save_knowledge(file_path=path, data=scores)

-scores_issues = repo_metrics.evaluate_scores_for_issues()
-path = Path(f"./srcopsmetrics/metrics/{repos[0]}/issue_scores.json")
-KnowledgeStorage(is_local=is_local).save_knowledge(file_path=path, data=scores_issues)
+# scores_issues = repo_metrics.evaluate_scores_for_issues()
+# path = Path(f"./srcopsmetrics/metrics/{repos[0]}/issue_scores.json")
+# KnowledgeStorage(is_local=is_local).save_knowledge(file_path=path, data=scores_issues)

if merge:
if thoth:
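
For orientation, the only metrics path left active in cli.py after this change is the Kebechet one shown above. A minimal sketch of that path, assuming it is driven outside the CLI, that a repository slug string is accepted for the repository argument, and that "yesterday" is computed the same way the CLI does (all assumptions, not shown in this diff):

    # Sketch only: the slug and the explicit "yesterday" computation are illustrative.
    from datetime import date, timedelta

    from srcopsmetrics.kebechet_metrics import KebechetMetrics

    yesterday = date.today() - timedelta(days=1)
    kebechet_metrics = KebechetMetrics(repository="thoth-station/mi", day=yesterday, is_local=True)
    kebechet_metrics.evaluate_and_store_kebechet_metrics()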
25 changes: 19 additions & 6 deletions srcopsmetrics/entities/interface.py
@@ -29,7 +29,7 @@
from voluptuous.error import MultipleInvalid
from voluptuous.schema_builder import Schema

-from srcopsmetrics import github_handling, utils
+from srcopsmetrics import utils
from srcopsmetrics.entities.tools.storage import KnowledgeStorage
from srcopsmetrics.enums import StoragePath

@@ -55,8 +55,8 @@ def __init__(self, repository_name: Optional[str] = None, repository: Optional[R
raise ValueError("Repository object or slug is required")

self.repository = repository
-if not repository:
-self.repository = github_handling.connect_to_source(repository_name)
+# if not repository:
+# self.repository = github_handling.connect_to_source(repository_name)

@classmethod
def name(cls) -> str:
@@ -113,7 +113,14 @@ def file_path(self) -> Path:
appendix = ".json" # if as_csv else ".json" TODO implement as_csv bool
return project_path.joinpath("./" + self.filename + appendix)

-def save_knowledge(self, file_path: Path = None, is_local: bool = False, as_csv: bool = False):
+def save_knowledge(
+    self,
+    file_path: Path = None,
+    is_local: bool = False,
+    as_csv: bool = False,
+    from_dataframe: bool = False,
+    from_singleton: bool = False,
+):
"""Save collected knowledge as json."""
if self.stored_entities is None or len(self.stored_entities) == 0:
_LOGGER.info("Nothing to store.")
@@ -129,8 +136,14 @@ def save_knowledge(self, file_path: Path = None, is_local: bool = False, as_csv:
_LOGGER.warning("Data found to be inconsistent with its schema, original message:")
_LOGGER.warning(str(e))

-new_data = pd.DataFrame.from_dict(self.stored_entities).T
-to_save = pd.concat([new_data, self.previous_knowledge])
+if from_dataframe:
+    if from_singleton:
+        to_save = self.stored_entities
+    else:
+        raise NotImplementedError
+else:
+    new_data = pd.DataFrame.from_dict(self.stored_entities).T
+    to_save = pd.concat([new_data, self.previous_knowledge])

_LOGGER.info("Knowledge file %s", (os.path.basename(file_path)))
_LOGGER.info("new %d entities", len(self.stored_entities))
46 changes: 46 additions & 0 deletions srcopsmetrics/entities/thoth_advise_metrics.py
@@ -0,0 +1,46 @@
# Copyright (C) 2022 Dominik Tuchyna
#
# This file is part of thoth-station/mi - Meta-information Indicators.
#
# thoth-station/mi - Meta-information Indicators is free software: you can redistribute it and/or modify
# it under the terms of the GNU General Public License as published by
# the Free Software Foundation, either version 3 of the License, or
# (at your option) any later version.
#
# thoth-station/mi - Meta-information Indicators is distributed in the hope that it will be useful,
# but WITHOUT ANY WARRANTY; without even the implied warranty of
# MERCHANTABILITY or FITNESS FOR A PARTICULAR PURPOSE. See the
# GNU General Public License for more details.
#
# You should have received a copy of the GNU General Public License
# along with thoth-station/mi - Meta-information Indicators. If not, see <http://www.gnu.org/licenses/>.

"""Adivse manager metrics."""

from typing import List

from voluptuous.schema_builder import Schema
from voluptuous.validators import Any

from srcopsmetrics.entities import Entity


class ThothAdviseMetrics(Entity):
"""Kebechet managers metrics class for a repository.
Intended to be used only for loading and storing operations.
"""

entity_schema = Schema({int: {str: Any(str, int)}})

def analyse(self) -> List[Any]:
"""Override :func:`~Entity.analyse`."""
raise NotImplementedError("cannot use with metrics")

def store(self, github_entity):
"""Override :func:`~Entity.store`."""
raise NotImplementedError("cannot use with metrics")

def get_raw_github_data(self):
"""Override :func:`~Entity.get_raw_github_data`."""
raise NotImplementedError("cannot use with metrics")
14 changes: 9 additions & 5 deletions srcopsmetrics/entities/tools/storage.py
@@ -36,9 +36,14 @@

def load_data_frame(path_or_buf: Union[Path, Any]) -> pd.DataFrame:
"""Load DataFrame from either string data or path."""
-df = pd.read_json(path_or_buf, orient="records", lines=True)
-if not df.empty:
+df = pd.DataFrame()
+
+if isinstance(path_or_buf, dict):
+    df = pd.DataFrame.from_dict(path_or_buf, orient="index")
+else:
+    df = pd.read_json(path_or_buf, orient="records", lines=True)
df = df.set_index("id")

return df
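
load_data_frame previously assumed a path or JSON-lines buffer; it now also accepts a plain dict. A short sketch of both branches, assuming the function is called directly and that the set_index("id") line applies only to the JSON branch, as the control flow above suggests:

    from pathlib import Path

    from srcopsmetrics.entities.tools.storage import load_data_frame

    # dict input (new): keys become the index via orient="index"
    scores = {1: {"score": 0.9}, 2: {"score": 0.4}}  # hypothetical payload
    df_from_dict = load_data_frame(scores)

    # path / JSON-lines input (unchanged): read with pd.read_json(..., orient="records",
    # lines=True) and re-indexed by the "id" column
    df_from_file = load_data_frame(Path("knowledge.json"))  # illustrative file name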


@@ -128,7 +133,6 @@ def load_data(self, file_path: Optional[Path] = None, as_json: bool = False) ->
else self.load_remotely(file_path, as_json=as_json)
)

_LOGGER.info("Data from file %s loaded")
return results

@staticmethod
Expand All @@ -137,7 +141,7 @@ def load_locally(file_path: Path, as_json: bool = False) -> pd.DataFrame:
_LOGGER.info("Loading knowledge locally")

if not file_path.exists():
_LOGGER.debug("Knowledge %s not found locally" % file_path)
_LOGGER.info("Knowledge %s not found locally" % file_path)
return pd.DataFrame()

if as_json:
@@ -156,5 +160,5 @@ def load_remotely(self, file_path: Path, as_json: bool = False) -> pd.DataFrame:
return data

except NotFoundError:
_LOGGER.debug("Knowledge %s not found on Ceph" % ceph_filename)
_LOGGER.info("Knowledge %s not found on Ceph" % ceph_filename)
return pd.DataFrame()

