Hypershift postinstall to add firewall rules (#338)
* dynamic post install tasks

* removed k8s config library and other unused code
mukrishn authored Jul 12, 2023
1 parent 626d70e commit f984ea7
Showing 4 changed files with 21 additions and 31 deletions.
4 changes: 2 additions & 2 deletions dags/openshift_nightlies/dag.py
@@ -160,9 +160,9 @@ def build(self):
             hosted_installer = self._get_hypershift_openshift_installer()
             wait_task = hosted_installer.wait_task()
             wait_before_cleanup = hosted_installer.wait_task(id="wait_before_cleanup")
-            for c_id, install_hc, cleanup_hc in install_cluster:
+            for c_id, install_hc, postinstall_hc, cleanup_hc in install_cluster:
                 benchmark = self._add_benchmarks(task_group=c_id)
-                install_hc >> wait_task >> benchmark >> wait_before_cleanup >> cleanup_hc
+                install_hc >> postinstall_hc >> wait_task >> benchmark >> wait_before_cleanup >> cleanup_hc
         else:
             install_cluster = installer.get_install_task()
         final_status = final_dag_status.get_task(self.dag)
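
The change threads each hosted cluster's postinstall task between its install task and the shared wait task. A minimal, self-contained Airflow 2.x sketch of that ordering (DAG and task names are hypothetical, not from this repo):

from datetime import datetime
from airflow import DAG
from airflow.operators.empty import EmptyOperator

with DAG("hcp_ordering_sketch", start_date=datetime(2023, 7, 12), schedule=None) as dag:
    # Shared task that all hosted clusters converge on, as in build() above.
    wait_task = EmptyOperator(task_id="wait_task")
    for i in (1, 2):
        install_hc = EmptyOperator(task_id=f"hcp-{i}-install")
        postinstall_hc = EmptyOperator(task_id=f"hcp-{i}-postinstall")
        # Install now feeds postinstall before the shared wait task.
        install_hc >> postinstall_hc >> wait_task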
36 changes: 12 additions & 24 deletions dags/openshift_nightlies/scripts/utils/rosa_post_install.py
@@ -11,16 +11,14 @@
 # See the License for the specific language governing permissions and
 # limitations under the License.
 
-from kubernetes import client, config
-from openshift.dynamic import DynamicClient
 import sys
 import argparse
 import subprocess
 import os
 import json
 
 # Make aws related config changes such as security group rules etc
-def _aws_config(nodes,clustername,jsonfile):
+def _aws_config(clustername,jsonfile,kubeconfig):
     try:
         json_file = json.load(open(jsonfile))
     except Exception as err:
@@ -30,29 +28,30 @@ def _aws_config(nodes,clustername,jsonfile):
     my_env = os.environ.copy()
     my_env['AWS_ACCESS_KEY_ID'] = json_file['aws_access_key_id']
     my_env['AWS_SECRET_ACCESS_KEY'] = json_file['aws_secret_access_key']
-    my_env['AWS_DEFAULT_REGION'] = json_file['aws_region_for_openshift']
+    my_env['AWS_DEFAULT_REGION'] = json_file['aws_region']
+    if "rosa_hcp" in json_file and json_file["rosa_hcp"] == "true":
+        clustername_check_cmd = ["oc get infrastructures.config.openshift.io cluster -o json --kubeconfig " + kubeconfig + " | jq -r '.status.platformStatus.aws.resourceTags[] | select( .key == \"api.openshift.com/name\" ).value'"]
+        print(clustername_check_cmd)
+        process = subprocess.Popen(clustername_check_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, env=my_env)
+        stdout,stderr = process.communicate()
+        clustername = stdout.decode("utf-8").replace('\n','').replace(' ','')
     vpc_cmd = ["aws ec2 describe-instances --query 'Reservations[*].Instances[*].[InstanceId,Tags[?Key==`Name`].Value|[0],State.Name,PrivateIpAddress,PublicIpAddress, PrivateDnsName, VpcId]' --output text | column -t | grep " + clustername + "| awk '{print $7}' | grep -v '^$' | sort -u"]
     print(vpc_cmd)
     process = subprocess.Popen(vpc_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, env=my_env)
     stdout,stderr = process.communicate()
     print("VPC:")
     print(stdout)
     print(stderr)
-    cluster_vpc = stdout.decode("utf-8")
-    cluster_vpc = cluster_vpc.replace('\n','')
-    cluster_vpc = cluster_vpc.replace(' ','')
+    cluster_vpc = stdout.decode("utf-8").replace('\n','').replace(' ','')
     sec_grp_cmd = ["aws ec2 describe-security-groups --filters \"Name=vpc-id,Values=" + cluster_vpc + "\" --output json | jq .SecurityGroups[].GroupId"]
     print(sec_grp_cmd)
     process = subprocess.Popen(sec_grp_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, env=my_env)
     stdout,stderr = process.communicate()
     print("Security Groups:")
     print(stdout)
     print(stderr)
-    sec_group = stdout.decode("utf-8")
+    sec_group = stdout.decode("utf-8").replace(' ','').replace('"','').replace('\n',' ')
 
-    sec_group = sec_group.replace(' ','')
-    sec_group = sec_group.replace('"','')
-    sec_group = sec_group.replace('\n',' ')
     sec_group_list = list(sec_group.split(" "))
     print(sec_group_list)
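
GitHub collapses the middle of _aws_config, which, per the commit title, is where the new firewall rules are applied to each entry in sec_group_list. A hypothetical sketch of such a step, reusing the script's subprocess pattern (the port range and CIDR below are illustrative, not taken from this commit):

for group_id in sec_group_list:
    # Illustrative only: open the NodePort range inside the VPC on each
    # security group discovered above.
    ingress_cmd = ["aws ec2 authorize-security-group-ingress --group-id " + group_id + " --protocol tcp --port 30000-32767 --cidr 10.0.0.0/16"]
    print(ingress_cmd)
    process = subprocess.Popen(ingress_cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True, env=my_env)
    stdout,stderr = process.communicate()
    print(stdout)
    print(stderr)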

@@ -94,28 +93,17 @@ def main():
                         help='Optional configuration file including all the dag vars')
     args = parser.parse_args()
 
-    if args.incluster.lower() == "true":
-        config.load_incluster_config()
-        k8s_config = client.Configuration()
-        k8s_client = client.api_client.ApiClient(configuration=k8s_config)
-    elif args.kubeconfig:
-        k8s_client = config.new_client_from_config(args.kubeconfig)
-    else:
-        k8s_client = config.new_client_from_config()
-
-    dyn_client = DynamicClient(k8s_client)
-    nodes = dyn_client.resources.get(api_version='v1', kind='Node')
-
     if args.kubeconfig:
         cmd = ["oc get infrastructures.config.openshift.io cluster -o jsonpath={.status.infrastructureName} --kubeconfig " + args.kubeconfig]
     else:
         cmd = ["oc get infrastructures.config.openshift.io cluster -o jsonpath={.status.infrastructureName}"]
 
     process = subprocess.Popen(cmd, stdout=subprocess.PIPE, stderr=subprocess.PIPE, shell=True)
     stdout,stderr = process.communicate()
     clustername = stdout.decode("utf-8")
 
     # AWS configuration
-    _aws_config(nodes,clustername,args.jsonfile)
+    _aws_config(clustername,args.jsonfile,args.kubeconfig)
 
 
 if __name__ == '__main__':
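
With the kubernetes/openshift client code gone, the script is driven entirely by its CLI flags, matching how the BashOperator below invokes it. A hypothetical manual run (paths illustrative):

python rosa_post_install.py --jsonfile /tmp/my-release-postinstall-task.json --kubeconfig ~/auth/config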
4 changes: 3 additions & 1 deletion dags/openshift_nightlies/tasks/install/rosa/rosa.py
@@ -4,6 +4,7 @@
 
 from openshift_nightlies.util import var_loader, kubeconfig, constants, executor
 from openshift_nightlies.tasks.install.openshift import AbstractOpenshiftInstaller
+from openshift_nightlies.tasks.utils import rosa_post_install
 from common.models.dag_config import DagConfig
 from openshift_nightlies.models.release import OpenshiftRelease
 
@@ -22,6 +23,7 @@ class RosaInstaller(AbstractOpenshiftInstaller):
     def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):
         super().__init__(dag, config, release)
         self.exec_config = executor.get_default_executor_config(self.dag_config, executor_image="airflow-managed-services")
+        self.rosa_postinstall_setup = rosa_post_install.Diagnosis(dag, config, release)
 
     def get_type(self):
         if self.config['rosa_hcp'] == "true":
@@ -32,7 +34,7 @@ def get_type(self):
     def get_install_hcp_task(self):
         for iteration in range(self.config['number_of_hostedcluster']):
             c_id = f"{'hcp-'+str(iteration+1)}" # adding 1 to name the cluster hcp-1, hcp-2..
-            yield c_id, self._get_task(operation="install", id=c_id), self._get_task(operation="cleanup", id=c_id)
+            yield c_id, self._get_task(operation="install", id=c_id), self.rosa_postinstall_setup._get_rosa_postinstallation(id=c_id), self._get_task(operation="cleanup", id=c_id)
 
     # Create Airflow Task for Install/Cleanup steps
     def _get_task(self, operation="install", id="", trigger_rule="all_success"):
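
get_install_hcp_task() now yields a 4-tuple per hosted cluster, which is exactly what the updated unpacking loop in dag.py consumes. A toy illustration of that contract (names hypothetical):

def fake_install_hcp_task(n):
    # Stand-in for get_install_hcp_task(): one 4-tuple per hosted cluster.
    for i in range(n):
        c_id = f"hcp-{i+1}"
        yield c_id, f"{c_id}-install", f"{c_id}-postinstall", f"{c_id}-cleanup"

for c_id, install_hc, postinstall_hc, cleanup_hc in fake_install_hcp_task(2):
    print(c_id, install_hc, postinstall_hc, cleanup_hc)
# hcp-1 hcp-1-install hcp-1-postinstall hcp-1-cleanup
# hcp-2 hcp-2-install hcp-2-postinstall hcp-2-cleanup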
8 changes: 4 additions & 4 deletions dags/openshift_nightlies/tasks/utils/rosa_post_install.py
@@ -41,11 +41,11 @@ def __init__(self, dag, config: DagConfig, release: OpenshiftRelease):
 
         super().__init__()
 
-        self.exec_config = executor.get_executor_config_with_cluster_access(self.config, self.release, executor_image="airflow-managed-services")
-
-    def _get_rosa_postinstallation(self, operation="postinstall", trigger_rule="all_success"):
+    def _get_rosa_postinstallation(self, operation="postinstall", id="", trigger_rule="all_success"):
+        self.exec_config = executor.get_executor_config_with_cluster_access(self.config, self.release, executor_image="airflow-managed-services", task_group=id)
+        task_prefix=f"{id}-"
         return BashOperator(
-            task_id=f"{operation}_rosa",
+            task_id=f"{task_prefix if id != '' else ''}{operation}-rosa",
             depends_on_past=False,
             bash_command=f"python {constants.root_dag_dir}/scripts/utils/rosa_post_install.py --jsonfile /tmp/{self.release_name}-postinstall-task.json --kubeconfig /home/airflow/auth/config",
             retries=3,
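
Because a postinstall task is now created per hosted cluster, the task_id gains the cluster's task-group prefix and the executor config is built per task group. A quick standalone check of the ids the new expression produces:

# Standalone check of the task_id expression used above.
for id in ("", "hcp-1", "hcp-2"):
    task_prefix = f"{id}-"
    print(f"{task_prefix if id != '' else ''}postinstall-rosa")
# postinstall-rosa
# hcp-1-postinstall-rosa
# hcp-2-postinstall-rosa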
