First Commit
morenod committed Jul 5, 2023
0 parents commit 3be6d40
Showing 16 changed files with 2,258 additions and 0 deletions.
2 changes: 2 additions & 0 deletions .gitignore
@@ -0,0 +1,2 @@
__pycache__/
**/__pycache__/
26 changes: 26 additions & 0 deletions .pre-commit-config.yaml
@@ -0,0 +1,26 @@
repos:
  - repo: https://github.com/igorshubovych/markdownlint-cli
    rev: v0.35.0
    hooks:
      - id: markdownlint
        args: [--disable, MD013, MD002]
  - repo: https://github.com/jumanjihouse/pre-commit-hooks
    rev: 3.0.0
    hooks:
      - id: shellcheck
  - repo: https://github.com/pre-commit/pre-commit-hooks
    rev: v4.4.0
    hooks:
      - id: check-json
  - repo: https://github.com/PyCQA/flake8
    rev: 6.0.0
    hooks:
      - id: flake8
        args:
          - --max-line-length=300
  - repo: https://github.com/PyCQA/pylint
    rev: v3.0.0a6
    hooks:
      - id: pylint
        args:
          - --max-line-length=300
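
With this configuration committed, the hooks can be exercised with the standard pre-commit workflow: `pre-commit install` registers them as a git hook and `pre-commit run --all-files` runs markdownlint, shellcheck, check-json, flake8 and pylint across the repository.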
Empty file added libs/__init__.py
Empty file.
129 changes: 129 additions & 0 deletions libs/arguments.py
@@ -0,0 +1,129 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module that defines the common arguments used by all platforms and imports the Arguments classes of each platform and module
"""
import argparse
import re
import importlib
import sys
from libs.elasticsearch import ElasticArguments
from libs.logging import LoggingArguments


class Arguments(argparse.ArgumentParser):
"""Common Arguments and imports for logging and elasticsearch arguments"""

def __init__(self, environment):
super().__init__()
EnvDefault = self.EnvDefault

self.common_parser = argparse.ArgumentParser(description="Common Arguments", add_help=False)

self.common_parser.add_argument("--platform", action=EnvDefault, env=environment, envvar="ROSA_BURNER_PLATFORM", required=True, choices=["rosa"])
self.common_parser.add_argument("--subplatform", action=EnvDefault, env=environment, envvar="ROSA_BURNER_SUBPLATFORM", help="Subplatforms of Platform", required=False)

self.common_parser.add_argument("--uuid", action=EnvDefault, env=environment, envvar="ROSA_BURNER_UUID", required=False)
self.common_parser.add_argument("--path", action=EnvDefault, env=environment, envvar="ROSA_BURNER_PATH", required=False)

self.common_parser.add_argument("--cluster-name-seed", action=EnvDefault, env=environment, envvar="ROSA_BURNER_CLUSTER_NAME_SEED", type=str, default="perfsc", help="Seed used to generate cluster names. 6 chars max")

self.common_parser.add_argument("--workers", action=EnvDefault, env=environment, envvar="ROSA_BURNER_WORKERS", type=self._verify_workers, required=True, default="3",
help="Number of workers for the hosted cluster (min: 3). If list (comma separated), iteration over the list until reach number of clusters")
self.common_parser.add_argument("--workers-wait-time", type=int, default=60, action=EnvDefault, env=environment, envvar="ROSA_BURNER_WORKERS_WAIT_TIME",
help="Waiting time in minutes for the workers to be Ready after cluster installation or machinepool creation . If 0, do not wait. Default: 60 minutes")
self.common_parser.add_argument("--wait-for-workers", action="store_true", help="After cluster will be ready, wait for all workers to be also ready", required=False)

self.common_parser.add_argument("--cluster-count", action=EnvDefault, env=environment, envvar="ROSA_BURNER_CLUSTER_COUNT", type=int, default=1)
self.common_parser.add_argument("--delay-between-batch", action=EnvDefault, env=environment, envvar="ROSA_BURNER_DELAY_BETWEEN_BATCH", default=60, type=int,
help="If set it will wait x seconds between each batch request", required=False)
self.common_parser.add_argument("--batch-size", action=EnvDefault, env=environment, envvar="ROSA_BURNER_BATCH_SIZE", type=int, default=0, help="number of clusters in a batch", required=False)

self.common_parser.add_argument("--watcher-delay", action=EnvDefault, env=environment, envvar="ROSA_BURNER_WATCHER_DELAY", default=60, type=int, help="Delay between each status check")

self.common_parser.add_argument("--wildcard-options", action=EnvDefault, env=environment, envvar="ROSA_BURNER_WILDCARD_OPTIONS", help="String to be passed directly to cluster create command on any platform. It wont be validated", required=False)

self.common_parser.add_argument("--enable-workload", action="store_true", help="Execute workload after clusters are installed", required=False)
self.common_parser.add_argument("--workload-repo", action=EnvDefault, env=environment, envvar="ROSA_BURNER_WORKLOAD_REPO", default="https://github.com/cloud-bulldozer/e2e-benchmarking.git", type=str, help="Git Repo of the workload", required=False)
self.common_parser.add_argument("--workload", action=EnvDefault, env=environment, envvar="ROSA_BURNER_WORKLOAD", help="Workload to execute after clusters are installed", default="cluster-density-ms")
self.common_parser.add_argument("--workload-script", action=EnvDefault, env=environment, envvar="ROSA_BURNER_WORKLOAD_SCRIPT", help="Workload to execute after clusters are installed", default="workloads/kube-burner-ocp-wrapper/run.sh")
self.common_parser.add_argument("--workload-executor", action=EnvDefault, env=environment, envvar="ROSA_BURNER_WORKLOAD_EXECUTOR", help="Complete path of binary used to execute the workload", default="/usr/bin/kube-burner")
self.common_parser.add_argument("--workload-duration", action=EnvDefault, env=environment, envvar="ROSA_BURNER_WORKLOAD_DURATION", default="1h", type=str, help="Workload execution duration in minutes", required=False)
self.common_parser.add_argument("--workload-jobs", action=EnvDefault, env=environment, envvar="ROSA_BURNER_WORKLOAD_JOBS", type=int, default=10, help="Jobs per worker.Workload will scale this number to the number of workers of the cluster", required=False)

self.common_parser.add_argument("--cleanup-clusters", action="store_true", help="Delete all created clusters at the end", required=False)
self.common_parser.add_argument("--wait-before-cleanup", action=EnvDefault, env=environment, envvar="ROSA_BURNER_WAIT_BEFORE_CLEANUP", help="Minutes to wait before starting the cleanup process", default=0, type=int, required=False)
self.common_parser.add_argument("--delay-between-cleanup", action=EnvDefault, env=environment, envvar="ROSA_BURNER_DELAY_BETWEEN_CLEANUP", help="Minutes to wait between cluster deletion", default=0, type=int, required=False)

self.common_args, self.unknown_args = self.common_parser.parse_known_args()

log_parser = argparse.ArgumentParser(description="Logging Arguments", add_help=False)
LoggingArguments(log_parser, environment)

es_parser = argparse.ArgumentParser(description="ElasticSearch Arguments", add_help=False)
ElasticArguments(es_parser, environment)

try:
if self.common_args.subplatform:
platform_module_path = "libs.platforms." + self.common_args.platform + "." + self.common_args.subplatform + "." + self.common_args.subplatform
platform_module = importlib.import_module(platform_module_path)
platformarguments = getattr(platform_module, self.common_args.subplatform.capitalize() + "Arguments")
else:
platform_module_path = "libs.platforms." + self.common_args.platform + "." + self.common_args.platform
platform_module = importlib.import_module(platform_module_path)
platformarguments = getattr(platform_module, self.common_args.platform.capitalize() + "Arguments")
platform_parser = argparse.ArgumentParser(description="Platform Arguments", add_help=False)
platformarguments(platform_parser, environment)
except ImportError as err:
print(err)
sys.exit("Exiting...")
except AttributeError as err:
print(err)
sys.exit("Exiting...")

self.parser = argparse.ArgumentParser(
description="Rosa-Burner",
add_help=True,
parents=[
self.common_parser,
log_parser,
es_parser,
platform_parser,
]
)
self.parameters = vars(self.parser.parse_args())

def __getitem__(self, item):
return self.parameters[item] if item in self.parameters else None

def _verify_workers(self, workers):
pattern = re.compile(r"^(\d+)(,\s*\d+)*$")
if workers.isdigit() and int(workers) % 3 != 0:
self.common_parser.error(f"Invalid value ({workers}) for parameter `--workers`. If digit, it must be divisible by 3'")
elif bool(pattern.match(workers)):
for num in workers.split(","):
if int(num) < 3 or int(num) % 3 != 0:
self.common_parser.error(f"Invalid value ({num}) for parameter `--workers`. If list, all values must be divisible by 3")
return workers

# def set_common_defaults(self):
# """"""
# common_defaults = vars(self.parser.parse_args([]))
# for subparser in [self.rosa_parser, self.hypershift_parser]:
# subparser.set_defaults(**common_defaults)

class EnvDefault(argparse.Action):
"""Argument passed has preference over the envvar"""

def __init__(self, env, envvar, required=True, default=None, **kwargs):
if not default and envvar:
if envvar in env:
default = env[envvar]
if required and default:
required = False
super(Arguments.EnvDefault, self).__init__(
default=default, required=required, **kwargs
)

def __call__(self, parser, namespace, values, option_string=None):
setattr(namespace, self.dest, values)
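
As a hedged illustration of the precedence EnvDefault implements (the environment dict, argument name and values below are invented for this sketch, which assumes the repository is on PYTHONPATH and its dependencies are installed), an env var fills in the default while an explicit flag still wins:

# Hypothetical sketch, not part of this commit: the env dict and values are
# invented; only Arguments.EnvDefault from the file above is real.
import argparse
from libs.arguments import Arguments

env = {"ROSA_BURNER_UUID": "from-env"}  # stand-in for os.environ
parser = argparse.ArgumentParser()
parser.add_argument("--uuid", action=Arguments.EnvDefault, env=env,
                    envvar="ROSA_BURNER_UUID", required=True)

print(parser.parse_args([]).uuid)                 # "from-env": the envvar supplied the default
print(parser.parse_args(["--uuid", "cli"]).uuid)  # "cli": the command-line value takes precedence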
57 changes: 57 additions & 0 deletions libs/aws.py
@@ -0,0 +1,57 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module to set AWS related variables
"""

import configparser
import os
import sys


class AWS:
"""AWS Class"""

def __init__(self, logging, account_file, profile):
if os.path.exists(account_file):
self.logging = logging
logging.info("AWS account file found. Loading account information")
self.aws_config = configparser.RawConfigParser()
self.aws_config.read(account_file)
if len(self.aws_config.sections()) == 1:
profile = self.aws_config.sections()[0]
else:
if not profile:
logging.error("Multiple profiles detected on AWS credentials file but no --aws-profile parameter")
sys.exit("Exiting...")
else:
if profile not in self.aws_config.sections():
logging.error(f"Profile {profile} especified as --aws-profile not found on AWS credentials file {account_file}")
sys.exit("Exiting...")
if ("aws_access_key_id" not in self.aws_config[profile] or "aws_secret_access_key" not in self.aws_config[profile]):
logging.error(f"Missing credentials on file {account_file} for profile {profile}")
sys.exit("Exiting...")
else:
logging.info(f"AWS configuration verified for profile {profile} on file {account_file}")
self.logging.debug(f"AWS Profile: {self.aws_config[profile]}")
else:
logging.error(f"AWS configuration file {account_file} especified as --aws-account-file not found")
sys.exit("Exiting...")

def set_aws_envvars(self, profile, aws_region):
""" Get AWS information from the account_file and set related environment vars"""
profile = self.aws_config.sections()[0] if len(self.aws_config.sections()) == 1 else profile
os.environ["AWS_PROFILE"] = profile
os.environ["AWS_REGION"] = aws_region
os.environ["AWS_ACCESS_KEY_ID"] = self.aws_config[profile]["aws_access_key_id"]
os.environ["AWS_SECRET_ACCESS_KEY"] = self.aws_config[profile]["aws_secret_access_key"]

def set_aws_environment(self, profile, aws_region):
""" Get AWS information from the account_file and save it on the environment object"""
profile = self.aws_config.sections()[0] if len(self.aws_config.sections()) == 1 else profile
aws = {}
aws['profile'] = profile
aws['region'] = aws_region
aws['aws_access_key_id'] = self.aws_config[profile]["aws_access_key_id"]
aws['aws_secret_access_key'] = self.aws_config[profile]["aws_secret_access_key"]
return aws
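
A hedged usage sketch of the AWS helper (the credentials path, profile name and region below are placeholders; it assumes a standard INI-style credentials file with aws_access_key_id and aws_secret_access_key keys):

# Hypothetical sketch, not part of this commit: path, profile and region are
# placeholders.
import logging
from libs.aws import AWS

logging.basicConfig(level=logging.INFO)
aws = AWS(logging, "/home/user/.aws/credentials", "default")

# Export the selected profile as AWS_* environment variables...
aws.set_aws_envvars("default", "us-east-1")

# ...or collect the same values into a dict for the platform environment object.
env_info = aws.set_aws_environment("default", "us-east-1")
print(env_info["region"])  # "us-east-1"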
101 changes: 101 additions & 0 deletions libs/elasticsearch.py
@@ -0,0 +1,101 @@
#!/usr/bin/env python
# -*- coding: utf-8 -*-
"""
Module to set connection to ElasticSearch and functions to upload documents
"""
import argparse
import sys
from elasticsearch import Elasticsearch as ES
from elasticsearch.exceptions import NotFoundError
from urllib3.util import Retry


class Elasticsearch:
"""ES Class"""

def __init__(self, logging, url, index, insecure, retries):

self.logging = logging
self.index = index

retry_on_timeout = True
retry_strategy = Retry(total=retries, backoff_factor=0.1)
retry_params = {
"retry_on_timeout": retry_on_timeout,
"retry": retry_strategy,
}

self.logging.info("Initializing Elasticsearch Connector...")
if url.startswith("https://"):
self.logging.debug("Setting Elasticsearch Connector with SSL...")
import ssl

ssl_ctx = ssl.create_default_context()
if insecure:
self.logging.debug(
"Setting Elasticsearch Connector with SSL unverified..."
)
ssl_ctx.check_hostname = False
ssl_ctx.verify_mode = ssl.CERT_NONE
self.elastic = ES(url, ssl_context=ssl_ctx, verify_certs=False, **retry_params)
elif url.startswith("http://"):
self.elastic = ES(url, **retry_params)
else:
self.logging.error(
f"Failed to initialize Elasticsearch with url {url}. It must start with http(s)://"
)
sys.exit("Exiting...")
self.logging.debug("Testing Elasticsearch connection")
if self.elastic.ping():
self.logging.debug("Version: " + self.elastic.info()["version"]["number"])
if not self._check_index():
self.logging.error(f"ES index {index} do not exists")
sys.exit("Exiting...")

def _check_index(self):
try:
return self.elastic.indices.exists(index=self.index)
except NotFoundError:
return False

def index_metadata(self, metadata):
self.logging.debug(f"Indexing data on {self.elastic.transport.hosts[0]}/{self.index}")
self.logging.debug(metadata)
try:
self.elastic.index(index=self.index, body=metadata)
except Exception as err:
self.logging.error(err)
self.logging.error(
f"Failed to index data on on {self.elastic.transport.hosts[0]}/{self.elastic.info().get('index')})"
)
self.logging.error(metadata)


class ElasticArguments:
    def __init__(self, parser, environment):
        EnvDefault = self.EnvDefault
        parser.add_argument("--es-url", action=EnvDefault, env=environment, envvar="ROSA_BURNER_ES_URL", help="Elasticsearch URL", required=False)
        parser.add_argument("--es-index", action=EnvDefault, env=environment, envvar="ROSA_BURNER_ES_INDEX", help="Elasticsearch Index", default="rosa-burner")
        parser.add_argument("--es-index-retry", action=EnvDefault, env=environment, envvar="ROSA_BURNER_ES_INDEX_RETRY", type=int, help="Number of retries when an index operation fails", default=5)
        parser.add_argument("--es-insecure", action="store_true", help="Bypass cert verification on SSL connections")

        # self.parameters = vars(parser.parse_args())

    def __getitem__(self, item):
        return self.parameters[item] if item in self.parameters else None

    class EnvDefault(argparse.Action):
        """An argument passed on the command line takes precedence over the envvar"""

        def __init__(self, env, envvar, required=True, default=None, **kwargs):
            if not default and envvar:
                if envvar in env:
                    default = env[envvar]
            if required and default:
                required = False
            super(ElasticArguments.EnvDefault, self).__init__(
                default=default, required=required, **kwargs
            )

        def __call__(self, parser, namespace, values, option_string=None):
            setattr(namespace, self.dest, values)
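
A hedged sketch of how the wrapper indexes a result document (the URL, index name and payload below are placeholders; the constructor exits if the index does not already exist or the URL scheme is not http/https):

# Hypothetical sketch, not part of this commit: URL, index and payload are
# placeholders.
import logging
from libs.elasticsearch import Elasticsearch

logging.basicConfig(level=logging.DEBUG)
es = Elasticsearch(logging, "https://search.example.com:9200", "rosa-burner",
                   insecure=True, retries=5)

es.index_metadata({"uuid": "example-uuid", "cluster_name": "perfsc-0001",
                   "install_duration": 1234})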